The data runs from March 2005 through end November 2016. It is daily.
The SPY is the S&P index and you have seen this one before.
The "RealVol" is realized volatility for the past 30 days i.e., the standard deviation of the returns on the S&P index in annualized form. See Dan's notebook on volatility.
The VIX is the CBOE volatility index. The current VIX index value quotes the expected annualized change in the S&P 500 index over the following 30 days, as computed from options-based theory and current options-market data. https://en.wikipedia.org/wiki/VIX
The VIXCM30 stands for the VIX 30 day volatility future, i.e., a measure of the VIX forward 30 days, i.e., from 30-60 days. Likewise the VIXCM60 is the 60-day forward future, i.e., a measure of the volatility between 60-90 days out in time. The five futures contracts constitute the "futures curve".
Imagine you are a trader and are interested in building a model for accurately forecasting the five futures prices, i.e., the five points in the volatility curve (VIXCM30, VIXCM60, VIXCM90, VIXCM120, VIXCM150). You can use all the data in the data frame above to come up with a forecasting model.
Devise a metric for this exercise. How would you convince someone with a good metric that the performance of your forecasting is highly accurate. There is no specific known metric, so feel free to look at existing research on volatility forecasting.
You may want to develop rolling forecasts, i.e., choose a reasonable look back period to fit the model and the a reasonable forward forecast. For example, you may want to use a 180-day look back to fit the model and then forecast the next weeks futures prices. (That is just an example, find your best approach.)
Importing Data, Functions and Packages
¶import pandas as pd
import matplotlib.pyplot as plt
from johansen_test import coint_johansen
from scipy import stats
%matplotlib inline
import numpy as np
from scipy import stats
from sklearn.model_selection import train_test_split
import statsmodels.api as sm # Time Series Analysis
from statsmodels.tsa.stattools import acf, pacf
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
from sklearn.metrics import mean_squared_error
import copy
import seaborn as sns
plt.style.use('ggplot')
plt.rcParams['figure.figsize'] = 15,8
from statsmodels.tsa.ar_model import AR
from statsmodels.tsa.arima_model import ARIMA
import scipy
from scipy import *
import math
from scipy import interpolate
import arch
from arch import arch_model
from statsmodels.tsa.vector_ar.vecm import VECM
from sklearn.model_selection import train_test_split
import warnings
warnings.filterwarnings("ignore")
from statsmodels.tsa.stattools import adfuller
def run_ad_fuller(X):
result = adfuller(X)
print('ADF Statistic: %f' % result[0])
print('p-value: %f' % result[1])
print('Critical Values:')
for key, value in result[4].items():
print('\t%s: %.3f' % (key, value))
if result[1] > 0.05:
print('\nWe fail to reject the Null Hypothesis (H0) -- the time series has a unit root and is not stationary')
else:
print('\nWe reject the Null Hypothesis (H0) -- the time series is stationary')
from statsmodels.tsa.arima_model import ARMA
from prettytable import PrettyTable as pt
import time
# evaluate an ARMA model
def evaluate_arma_model(X, p, q, metric='BIC'):
assert metric in ('AIC','BIC','HQIC','MSE')
try:
model = ARMA(X, order=(p,q))
r = model.fit(trend='nc', method='css-mle')
if metric == 'BIC': value = r.bic
elif metric == 'AIC': value = r.aic
elif metric == 'HQIC': value = r.hqic
elif metric == 'MSE': value = np.mean(r.sigma2) ** .5
return value
except:
return None
def run_grid_over_arma(X, p_lags, q_lags, metric='BIC'):
header = ['p\q'] + list(map(lambda q: 'q (%d)' % q, q_lags))
start_time = time.time()
table = pt(header)
bad = []
result = {}
for p in p_lags:
row = ['p(%d)' % p]
for q in q_lags:
#print(p,q)
value = evaluate_arma_model(X, p, q, metric=metric)
if value is None:
bad += [(p,q)]
row += ['n/a']
else:
row += ['%8.2F' % value]
result[(p,q)] = value
table.add_row(row)
print(row)
print(table)
end_time = time.time()
print('bad values: %s' % str(bad))
print('time required', end_time - start_time)
return result
from sklearn.metrics import mean_squared_error
def measure_error(actual, pred, label=None, show=True):
sq_error = mean_squared_error(actual,pred)
rmse = np.mean(sq_error)**.5
avg = np.mean(actual)
errors = {'RMSE': rmse, 'RMSE_pcent': rmse / avg, 'label':label}
if show:
for (k,v) in errors.items():
print('%s = %s' % (k,v))
return errors
def ts_plot(y, lags=None, figsize=(10, 8), style='bmh'):
if not isinstance(y, pd.Series):
y = pd.Series(y)
with plt.style.context(style):
fig = plt.figure(figsize=figsize)
layout = (3, 2)
ts_ax = plt.subplot2grid(layout, (0, 0), colspan=2)
acf_ax = plt.subplot2grid(layout, (1, 0))
pacf_ax = plt.subplot2grid(layout, (1, 1))
qq_ax = plt.subplot2grid(layout, (2, 0))
pp_ax = plt.subplot2grid(layout, (2, 1))
y.plot(ax=ts_ax)
ts_ax.set_title('Time Series Analysis Plots')
plot_acf(y, lags=lags, ax=acf_ax, alpha=0.5)
plot_pacf(y, lags=lags, ax=pacf_ax, alpha=0.5)
sm.qqplot(y, line='s', ax=qq_ax)
qq_ax.set_title('QQ Plot')
stats.probplot(y, sparams=(y.mean(), y.std()), plot=pp_ax)
plt.tight_layout()
run_ad_fuller(y)
return
from sklearn.metrics import confusion_matrix
def plot_confusion_matrix(actual, pred, show=True):
y_true = []
y_pred = []
last = actual[0]
for (a,p) in list(zip(actual, pred))[1:]:
y_true += [-1 + int(a - last > 0) * 2]
y_pred += [-1 + int(p - last > 0) * 2]
last = a
labels = ['tn', 'fp', 'fn', 'tp']
cm = confusion_matrix(y_true, y_pred)
print('Confusion Matrix\nRaw')
print(cm)
print('Normalized')
cm_norm = cm / np.sum(cm)
if show:
print(cm_norm)
plt.figure()
plt.imshow(cm, cmap=plt.cm.Blues)
plt.colorbar()
#tick_marks = np.arange(2)
plt.xticks([-0.5,0.5], ['negative','positive'])
plt.yticks([-0.5,0.5], ['negative','positive'])
plt.ylabel('Actual')
plt.xlabel('Predicted')
plt.title('Manual AR(p) Model Predicted vs. Actual')
return {'cm': cm, 'cm_norm': cm_norm}
from statsmodels.stats.diagnostic import acorr_ljungbox
def run_ljung_box(X,lags=40):
(_,p_values) = acorr_ljungbox(X, lags=lags, boxpierce=False)
print('Ljung-Box Test')
for n,p_value in enumerate(p_values):
if p_value < .05:
print('lag: %3d | p_value: %.4f | still has information' % (n+1, p_value))
df_data=pd.read_csv('vix_data.csv')
df_data.head()
| Date | SPY | RealVol | VIX | VIXCM30 | VIXCM60 | VIXCM90 | VIXCM120 | VIXCM150 | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 3/23/2005 | 92.578594 | 9.120305 | 14.06 | 14.506154 | 15.065909 | 15.454545 | 15.739375 | 15.955000 |
| 1 | 3/24/2005 | 92.689358 | 6.457921 | 13.42 | 14.225263 | 14.894773 | 15.242500 | 15.524375 | 15.768125 |
| 2 | 3/28/2005 | 92.823878 | 5.578653 | 13.75 | 14.398649 | 14.993636 | 15.320909 | 15.586250 | 15.820625 |
| 3 | 3/29/2005 | 92.206709 | 9.290422 | 14.49 | 14.931667 | 15.219773 | 15.458409 | 15.667813 | 15.864688 |
| 4 | 3/30/2005 | 93.512316 | 7.313855 | 13.64 | 14.428571 | 15.091364 | 15.398182 | 15.600312 | 15.764375 |
Below we can see how the VIX is mean reverting and skewed right (positively):
plt.rcParams['figure.figsize'] = 15,8
df_data.VIX.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x10a045be0>
df_data.VIX.hist(bins=100)
<matplotlib.axes._subplots.AxesSubplot at 0x1c19a4fdd8>
First let's summarize the data:
len(df_data)
2943
df_data.describe()
| SPY | RealVol | VIX | VIXCM30 | VIXCM60 | VIXCM90 | VIXCM120 | VIXCM150 | |
|---|---|---|---|---|---|---|---|---|
| count | 2943.000000 | 2943.000000 | 2943.000000 | 2943.000000 | 2943.000000 | 2943.000000 | 2915.000000 | 509.000000 |
| mean | 132.905896 | 11.228967 | 19.586290 | 20.737151 | 21.419516 | 21.851063 | 22.277360 | 17.815324 |
| std | 41.630454 | 8.782778 | 9.433398 | 7.751411 | 6.942745 | 6.461391 | 6.149277 | 4.051598 |
| min | 58.030194 | 1.725931 | 9.890000 | 11.763684 | 12.900000 | 13.569474 | 14.047656 | 14.298125 |
| 25% | 102.141627 | 6.420053 | 13.455000 | 15.270175 | 16.291053 | 16.963577 | 17.662237 | 15.432500 |
| 50% | 118.316563 | 8.551970 | 16.670000 | 18.522917 | 19.500000 | 20.193421 | 20.660526 | 16.255079 |
| 75% | 170.156606 | 12.769802 | 22.535000 | 24.030250 | 24.680550 | 25.076158 | 25.684722 | 18.905000 |
| max | 221.520000 | 110.821685 | 80.860000 | 62.706364 | 54.465500 | 48.518947 | 45.061053 | 37.655000 |
We can make a few observations from the summary statistics above:
Now we will also make some plots to visualize the data:
df_data.set_index('Date', inplace=True)
df_data.head()
| SPY | RealVol | VIX | VIXCM30 | VIXCM60 | VIXCM90 | VIXCM120 | VIXCM150 | |
|---|---|---|---|---|---|---|---|---|
| Date | ||||||||
| 3/23/2005 | 92.578594 | 9.120305 | 14.06 | 14.506154 | 15.065909 | 15.454545 | 15.739375 | 15.955000 |
| 3/24/2005 | 92.689358 | 6.457921 | 13.42 | 14.225263 | 14.894773 | 15.242500 | 15.524375 | 15.768125 |
| 3/28/2005 | 92.823878 | 5.578653 | 13.75 | 14.398649 | 14.993636 | 15.320909 | 15.586250 | 15.820625 |
| 3/29/2005 | 92.206709 | 9.290422 | 14.49 | 14.931667 | 15.219773 | 15.458409 | 15.667813 | 15.864688 |
| 3/30/2005 | 93.512316 | 7.313855 | 13.64 | 14.428571 | 15.091364 | 15.398182 | 15.600312 | 15.764375 |
Plotting all variables together:
df_data.VIXCM60.plot(label='VIXCM60')
df_data.VIX.plot(label='VIX')
df_data.VIXCM90.plot(label='VIXCM90')
df_data.VIXCM30.plot(label='VIXCM30')
df_data.VIXCM120.plot(label='VIXCM120')
df_data.VIXCM150.plot(label='VIXCM150')
df_data.RealVol.plot(label='RealVol')
df_data.SPY.plot(label='SPY')
<matplotlib.axes._subplots.AxesSubplot at 0x1c19c342b0>
Above we can see that SPY is upward trending and all of the VIX variables as well as the realized volatility may be cointegrated. We can take closer looks below:
df_data.SPY.plot(label='SPY')
<matplotlib.axes._subplots.AxesSubplot at 0x1c19c68c50>
df_data.VIXCM60.plot(label='VIXCM60')
df_data.VIX.plot(label='VIX')
df_data.VIXCM90.plot(label='VIXCM90')
df_data.VIXCM30.plot(label='VIXCM30')
df_data.VIXCM120.plot(label='VIXCM120')
df_data.VIXCM150.plot(label='VIXCM150')
df_data.RealVol.plot(label='RealVol')
<matplotlib.axes._subplots.AxesSubplot at 0x1c1c1e6668>
df_data.VIXCM60.plot(label='VIXCM60')
df_data.VIX.plot(label='VIX')
df_data.VIXCM90.plot(label='VIXCM90')
df_data.VIXCM30.plot(label='VIXCM30')
df_data.VIXCM120.plot(label='VIXCM120')
df_data.VIXCM150.plot(label='VIXCM150')
<matplotlib.axes._subplots.AxesSubplot at 0x1c1c423940>
We can see these value seem to be moving together and are likely cointegrated.
As we saw in our data exploration, both VIXCM120 and VIXCM150 have missing values.
VIXCM120, has only a handful of missing values, so we can look at where these values appear in the data:
df_data.isna().sum()
SPY 0 RealVol 0 VIX 0 VIXCM30 0 VIXCM60 0 VIXCM90 0 VIXCM120 28 VIXCM150 2434 dtype: int64
Since this variable is only missing data from before 3/8/2006, we could only use data after this date to avoid any issues the missing values could cause. But let's look at where the missing values in VIXCM150 are:
df_data['VIXCM150'].plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c1c6f27b8>
We can see these missing values are spread all over the data so we can't simply use a portion of the original dataset to avoid this issue- instead we will need to find another way to address the missing values for this variable. We were able to find a way to extrapolate points on a curve which we can use to fill in missing VIXCM120 and VIXCM150 values.
First we will use all of the VIX, VIXCM30, VIXCM60, and VIXCM90 values on a given day to extrapolate the VIXCM120 values on the missing days. Then we will use all of the VIX, VIXCM30, VIXCM60, VIXCM90 and VIXCM120 values on a given day to extrapolate the VIXCM150 values on the missing days.
We ran many tests to see how this extrapolation was performing compared to real VIXCM120 and VIXCM150 scores and we very happy this how accurate they were, so we believe this is a secure way to
for date,row in df_data.iterrows():
if math.isnan(row['VIXCM120']):
x=[0,30,60,90]
y=[row['VIX'],row['VIXCM30'],row['VIXCM60'],row['VIXCM90']]
f = interpolate.interp1d(x, y, fill_value = "extrapolate")
new_y=float(f(120))
df_data.set_value(date, 'VIXCM120', new_y)
for date,row in df_data.iterrows():
if math.isnan(row['VIXCM150']):
x=[0,30,60,90,120]
y=[row['VIX'],row['VIXCM30'],row['VIXCM60'],row['VIXCM90'],row['VIXCM120']]
f = interpolate.interp1d(x, y, fill_value = "extrapolate")
new_y=float(f(150))
df_data.set_value(date, 'VIXCM150', new_y)
df_data.isna().sum()
Date 0 SPY 0 RealVol 0 VIX 0 VIXCM30 0 VIXCM60 0 VIXCM90 0 VIXCM120 0 VIXCM150 0 dtype: int64
We will measure the success of a model with by observing the daily PnLs. For any given day, we will advise a direction, to buy (+1) or to sell (-1) a given future. Then we will multiply the direction by $Future Price_{t}$ - $Future Price_{t-1}$.
For example, if the VIXCM30 was 20 yesterday and the VIXCM30 is 24 today, and our model predicted the VIXCM30 would increase, then the PnL we'd output would be $(24-20)*(+1) = 4$ but if our model predicted the VIXCM30 would have decreased, our PnL output would have been $(24-20)*(-1) = -4$.
def make_money(actual,predicted):
d = {'Predicted': predicted,
'Actual': actual}
PnL_df = pd.DataFrame(d)
Total_PnL =0
direction=0
correct=0
incorrect=0
for i in range(0, (len(PnL_df)-1)):
if PnL_df['Predicted'][i+1]>PnL_df['Actual'][i]:
direction=1
else:
direction=-1
daily_PnL= direction*(PnL_df.Actual[i+1]-PnL_df.Actual[i])
Total_PnL+=daily_PnL
if daily_PnL>=0:
correct+=1
else:
incorrect+=1
print('Total profit or loss: %.4f' % (Total_PnL))
print('Number of correct days: %d' % (correct))
print('total incorrect days: %d' % (incorrect))
With our ARCH and GARCH models we will try to predict the volatility of the SPY daily returns. Based on these predictions (volatility will increase or decrease), we can make a recommendation about whether to buy or sell each future.
First we will create a new variable for the SPY returns:
df_data['spy_ret'] = df_data['SPY'].pct_change()
ts_plot(df_data['spy_ret'].dropna(), lags = 20)
ADF Statistic: -12.501615 p-value: 0.000000 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
Next we can compute the variance of the returns. This variable will be used to see how accurate our model is:
# compute variance
spy_daily_avg = df_data['spy_ret'].mean()
df_data['variance'] = (df_data['spy_ret'] - spy_daily_avg) ** 2
ts_plot(df_data['variance'].dropna(), lags=20)
ADF Statistic: -4.766926 p-value: 0.000063 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
Now we can run our model. Please note that we have run a number of ARCH and GARCH models (with varying orders) using the following code. We have summarized the results in a table after the code, which will just be run for one of the models.
df_data.reset_index(inplace=True)
train,test = train_test_split(df_data, test_size=0.2, shuffle=False)
df_data['Date'] = pd.to_datetime(df_data['Date'])
lst = [0,29,59,89,119,149] # VIXCM nth values
H = train['spy_ret'].dropna()[-180:].tolist()
#H = train['spy_logret'].dropna()[-180:].tolist()
ACTUAL = []
PRED = []
for date,row in test.iterrows():
am = arch_model(H, vol='Arch', p=1, mean='Constant')
#am = arch_model(H, vol='Garch', p=3,q=2, dist='Normal',mean='Constant')
f = am.fit()
fc = f.forecast(horizon=150)
ACTUAL += [row['variance']]
PRED += [fc.variance.values[-1][lst]]
#print(date,ACTUAL[-1],PRED[-1])
H += [row['spy_ret']]
H = H[1:]
Iteration: 1, Func. Count: 5, Neg. LLF: -659.9420976322722
Iteration: 2, Func. Count: 17, Neg. LLF: -660.0753368434889
Optimization terminated successfully. (Exit mode 0)
Current function value: -660.0753377375784
Iterations: 3
Function evaluations: 25
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -662.3038993947875
Iteration: 2, Func. Count: 17, Neg. LLF: -662.4368207461009
Optimization terminated successfully. (Exit mode 0)
Current function value: -662.4368214190276
Iterations: 3
Function evaluations: 25
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -655.646871393028
Iteration: 2, Func. Count: 17, Neg. LLF: -655.7033109942909
Iteration: 3, Func. Count: 24, Neg. LLF: -655.7286575610116
Iteration: 4, Func. Count: 33, Neg. LLF: -655.7383757791745
Iteration: 5, Func. Count: 45, Neg. LLF: -655.7383822997087
Iteration: 6, Func. Count: 53, Neg. LLF: -655.7397939858729
Iteration: 7, Func. Count: 63, Neg. LLF: -655.7398118266389
Iteration: 8, Func. Count: 71, Neg. LLF: -655.7555757455966
Iteration: 9, Func. Count: 78, Neg. LLF: -655.7618562021332
Iteration: 10, Func. Count: 85, Neg. LLF: -655.7780134636225
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.7780135141056
Iterations: 11
Function evaluations: 91
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -655.2328342761537
Iteration: 2, Func. Count: 17, Neg. LLF: -655.2901630573631
Iteration: 3, Func. Count: 25, Neg. LLF: -655.2913706138233
Iteration: 4, Func. Count: 33, Neg. LLF: -655.2943181243002
Iteration: 5, Func. Count: 42, Neg. LLF: -655.2977253321736
Iteration: 6, Func. Count: 49, Neg. LLF: -655.311452941711
Iteration: 7, Func. Count: 56, Neg. LLF: -655.3114718372431
Iteration: 8, Func. Count: 61, Neg. LLF: -655.3115187490898
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.3115187491103
Iterations: 8
Function evaluations: 61
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -655.3916172522531
Iteration: 2, Func. Count: 17, Neg. LLF: -655.450785586859
Iteration: 3, Func. Count: 25, Neg. LLF: -655.4510087778469
Iteration: 4, Func. Count: 33, Neg. LLF: -655.4562012432839
Iteration: 5, Func. Count: 40, Neg. LLF: -655.4750025166768
Iteration: 6, Func. Count: 48, Neg. LLF: -655.475054695914
Iteration: 7, Func. Count: 53, Neg. LLF: -655.4751238131926
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.4751243290468
Iterations: 7
Function evaluations: 55
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -654.3000340890056
Iteration: 2, Func. Count: 17, Neg. LLF: -654.3551449772781
Iteration: 3, Func. Count: 25, Neg. LLF: -654.3622395843736
Iteration: 4, Func. Count: 32, Neg. LLF: -654.3802359039512
Iteration: 5, Func. Count: 39, Neg. LLF: -654.3802382805384
Iteration: 6, Func. Count: 46, Neg. LLF: -654.380270876936
Optimization terminated successfully. (Exit mode 0)
Current function value: -654.3802708769763
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -654.330742040216
Iteration: 2, Func. Count: 17, Neg. LLF: -654.382317278534
Iteration: 3, Func. Count: 26, Neg. LLF: -654.3823288895812
Iteration: 4, Func. Count: 34, Neg. LLF: -654.3873314189232
Iteration: 5, Func. Count: 43, Neg. LLF: -654.3895391000727
Iteration: 6, Func. Count: 50, Neg. LLF: -654.3914319022268
Iteration: 7, Func. Count: 57, Neg. LLF: -654.3914708832925
Iteration: 8, Func. Count: 62, Neg. LLF: -654.3914956555268
Optimization terminated successfully. (Exit mode 0)
Current function value: -654.3914956555341
Iterations: 8
Function evaluations: 62
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -654.0680900349303
Iteration: 2, Func. Count: 17, Neg. LLF: -654.1194724827947
Optimization terminated successfully. (Exit mode 0)
Current function value: -654.1194724653178
Iterations: 3
Function evaluations: 26
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -652.6787825517026
Iteration: 2, Func. Count: 17, Neg. LLF: -652.7336654588219
Iteration: 3, Func. Count: 27, Neg. LLF: -652.7336673825902
Iteration: 4, Func. Count: 39, Neg. LLF: -652.7336731675352
Iteration: 5, Func. Count: 47, Neg. LLF: -652.7338865653126
Iteration: 6, Func. Count: 57, Neg. LLF: -652.7338936860979
Iteration: 7, Func. Count: 69, Neg. LLF: -652.733944285272
Iteration: 8, Func. Count: 77, Neg. LLF: -652.7340009741038
Iteration: 9, Func. Count: 87, Neg. LLF: -652.734181995491
Iteration: 10, Func. Count: 95, Neg. LLF: -652.7446197395984
Iteration: 11, Func. Count: 102, Neg. LLF: -652.7470880450417
Iteration: 12, Func. Count: 110, Neg. LLF: -652.7472135420189
Iteration: 13, Func. Count: 116, Neg. LLF: -652.747314124274
Iteration: 14, Func. Count: 121, Neg. LLF: -652.7473408847382
Iteration: 15, Func. Count: 126, Neg. LLF: -652.7473482494086
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.7473482494088
Iterations: 17
Function evaluations: 126
Gradient evaluations: 15
Iteration: 1, Func. Count: 5, Neg. LLF: -652.6926809215587
Iteration: 2, Func. Count: 17, Neg. LLF: -652.7449078342412
Iteration: 3, Func. Count: 25, Neg. LLF: -652.751888295217
Iteration: 4, Func. Count: 34, Neg. LLF: -652.7535859687284
Iteration: 5, Func. Count: 42, Neg. LLF: -652.753762089377
Iteration: 6, Func. Count: 50, Neg. LLF: -652.7538116508761
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.753811650862
Iterations: 7
Function evaluations: 61
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -653.3717631080954
Iteration: 2, Func. Count: 17, Neg. LLF: -653.4268053312135
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -653.4268053133221
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -653.1205996925453
Iteration: 2, Func. Count: 17, Neg. LLF: -653.1756980444384
Iteration: 3, Func. Count: 25, Neg. LLF: -653.185206935576
Iteration: 4, Func. Count: 33, Neg. LLF: -653.1855298948992
Iteration: 5, Func. Count: 41, Neg. LLF: -653.1855397252682
Iteration: 6, Func. Count: 46, Neg. LLF: -653.1855443275228
Optimization terminated successfully. (Exit mode 0)
Current function value: -653.1855443275244
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -652.8880277736112
Iteration: 2, Func. Count: 17, Neg. LLF: -652.9443040753004
Iteration: 3, Func. Count: 25, Neg. LLF: -652.9512913092688
Iteration: 4, Func. Count: 34, Neg. LLF: -652.9539662079872
Iteration: 5, Func. Count: 41, Neg. LLF: -652.95400031495
Iteration: 6, Func. Count: 49, Neg. LLF: -652.9540516246801
Iteration: 7, Func. Count: 55, Neg. LLF: -652.9540545809866
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.9540545811503
Iterations: 7
Function evaluations: 66
Gradient evaluations: 7
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -652.8470560679114
Iteration: 2, Func. Count: 17, Neg. LLF: -652.9047980862209
Iteration: 3, Func. Count: 25, Neg. LLF: -652.9132030394405
Iteration: 4, Func. Count: 34, Neg. LLF: -652.9147329200343
Iteration: 5, Func. Count: 41, Neg. LLF: -652.915463318544
Iteration: 6, Func. Count: 49, Neg. LLF: -652.9155194860964
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.9155194860865
Iterations: 6
Function evaluations: 49
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -652.0762467825074
Iteration: 2, Func. Count: 17, Neg. LLF: -652.1071195416826
Iteration: 3, Func. Count: 26, Neg. LLF: -652.1076621772175
Iteration: 4, Func. Count: 33, Neg. LLF: -652.1526014125025
Iteration: 5, Func. Count: 41, Neg. LLF: -652.1527779284752
Iteration: 6, Func. Count: 46, Neg. LLF: -652.1529879321919
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.1529879322479
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -651.8083581854055
Iteration: 2, Func. Count: 17, Neg. LLF: -651.8400072764684
Iteration: 3, Func. Count: 26, Neg. LLF: -651.8404942369202
Iteration: 4, Func. Count: 33, Neg. LLF: -651.874945906507
Iteration: 5, Func. Count: 42, Neg. LLF: -651.8751517410087
Iteration: 6, Func. Count: 47, Neg. LLF: -651.8752237767426
Optimization terminated successfully. (Exit mode 0)
Current function value: -651.8752237768247
Iterations: 6
Function evaluations: 47
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -651.9045345174331
Iteration: 2, Func. Count: 17, Neg. LLF: -651.9385393875501
Iteration: 3, Func. Count: 26, Neg. LLF: -651.9388898606351
Iteration: 4, Func. Count: 33, Neg. LLF: -651.9806663293878
Iteration: 5, Func. Count: 41, Neg. LLF: -651.9808033301526
Iteration: 6, Func. Count: 46, Neg. LLF: -651.981041529755
Optimization terminated successfully. (Exit mode 0)
Current function value: -651.9810415298115
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -652.1866631561422
Iteration: 2, Func. Count: 17, Neg. LLF: -652.2553018455044
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -652.255301838319
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -652.1668330825432
Iteration: 2, Func. Count: 17, Neg. LLF: -652.2401795888227
Iteration: 3, Func. Count: 25, Neg. LLF: -652.2470896837203
Iteration: 4, Func. Count: 34, Neg. LLF: -652.2499254369105
Iteration: 5, Func. Count: 41, Neg. LLF: -652.2503234580743
Iteration: 6, Func. Count: 49, Neg. LLF: -652.2503705854149
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.2503705854194
Iterations: 6
Function evaluations: 49
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -652.4054519966676
Iteration: 2, Func. Count: 17, Neg. LLF: -652.4668739241156
Iteration: 3, Func. Count: 26, Neg. LLF: -652.4668993117391
Iteration: 4, Func. Count: 35, Neg. LLF: -652.4687251838101
Iteration: 5, Func. Count: 43, Neg. LLF: -652.4726293166317
Iteration: 6, Func. Count: 52, Neg. LLF: -652.4732334398202
Iteration: 7, Func. Count: 59, Neg. LLF: -652.4757031138633
Iteration: 8, Func. Count: 67, Neg. LLF: -652.476840989589
Iteration: 9, Func. Count: 72, Neg. LLF: -652.4768781785262
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.4768781785278
Iterations: 9
Function evaluations: 72
Gradient evaluations: 9
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -653.7659050297555
Iteration: 2, Func. Count: 17, Neg. LLF: -653.8339200601217
Iteration: 3, Func. Count: 25, Neg. LLF: -653.8340661641085
Iteration: 4, Func. Count: 33, Neg. LLF: -653.8368287400006
Iteration: 5, Func. Count: 41, Neg. LLF: -653.8417374193965
Iteration: 6, Func. Count: 48, Neg. LLF: -653.8423238619855
Iteration: 7, Func. Count: 56, Neg. LLF: -653.8430647547218
Iteration: 8, Func. Count: 61, Neg. LLF: -653.8434126869214
Optimization terminated successfully. (Exit mode 0)
Current function value: -653.8434126868774
Iterations: 8
Function evaluations: 61
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -653.8218614908806
Iteration: 2, Func. Count: 17, Neg. LLF: -653.893208460891
Iteration: 3, Func. Count: 26, Neg. LLF: -653.8933163918327
Iteration: 4, Func. Count: 34, Neg. LLF: -653.8968445258797
Iteration: 5, Func. Count: 43, Neg. LLF: -653.8993177530855
Iteration: 6, Func. Count: 50, Neg. LLF: -653.9049323382559
Iteration: 7, Func. Count: 58, Neg. LLF: -653.9049356127038
Iteration: 8, Func. Count: 63, Neg. LLF: -653.904950670782
Optimization terminated successfully. (Exit mode 0)
Current function value: -653.9049506707827
Iterations: 8
Function evaluations: 63
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -654.2775031840852
Iteration: 2, Func. Count: 17, Neg. LLF: -654.3438191817922
Iteration: 3, Func. Count: 24, Neg. LLF: -654.3748976068796
Iteration: 4, Func. Count: 32, Neg. LLF: -654.382716608664
Iteration: 5, Func. Count: 40, Neg. LLF: -654.3894044831445
Iteration: 6, Func. Count: 47, Neg. LLF: -654.3923874351515
Iteration: 7, Func. Count: 55, Neg. LLF: -654.3925100754279
Iteration: 8, Func. Count: 60, Neg. LLF: -654.3925340873183
Optimization terminated successfully. (Exit mode 0)
Current function value: -654.3925345583884
Iterations: 8
Function evaluations: 62
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -656.1309677044703
Iteration: 2, Func. Count: 17, Neg. LLF: -656.2240946360441
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -656.2240946184189
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -656.3475186553014
Iteration: 2, Func. Count: 17, Neg. LLF: -656.4439621790955
Iteration: 3, Func. Count: 26, Neg. LLF: -656.4443720977329
Iteration: 4, Func. Count: 35, Neg. LLF: -656.4486198996442
Iteration: 5, Func. Count: 44, Neg. LLF: -656.4493039245647
Iteration: 6, Func. Count: 56, Neg. LLF: -656.4510582531769
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -656.4510582368882
Iterations: 10
Function evaluations: 56
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -656.4275507400528
Iteration: 2, Func. Count: 17, Neg. LLF: -656.5140017245888
Iteration: 3, Func. Count: 26, Neg. LLF: -656.5185967945338
Iteration: 4, Func. Count: 34, Neg. LLF: -656.5317035188066
Iteration: 5, Func. Count: 42, Neg. LLF: -656.537977696685
Iteration: 6, Func. Count: 51, Neg. LLF: -656.5380521784421
Iteration: 7, Func. Count: 58, Neg. LLF: -656.5384777179393
Iteration: 8, Func. Count: 66, Neg. LLF: -656.5388948878749
Iteration: 9, Func. Count: 71, Neg. LLF: -656.5391280236668
Optimization terminated successfully. (Exit mode 0)
Current function value: -656.5391280236361
Iterations: 10
Function evaluations: 71
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -656.7588416212634
Iteration: 2, Func. Count: 17, Neg. LLF: -656.8483431955644
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -656.8483431711688
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -657.5005382225817
Iteration: 2, Func. Count: 17, Neg. LLF: -657.5631829228405
Iteration: 3, Func. Count: 25, Neg. LLF: -657.6160720755558
Iteration: 4, Func. Count: 33, Neg. LLF: -657.6194177486296
Iteration: 5, Func. Count: 40, Neg. LLF: -657.6202661269544
Iteration: 6, Func. Count: 48, Neg. LLF: -657.6212855551237
Iteration: 7, Func. Count: 53, Neg. LLF: -657.6213314992976
Optimization terminated successfully. (Exit mode 0)
Current function value: -657.6213320053339
Iterations: 7
Function evaluations: 55
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -660.8030295819241
Iteration: 2, Func. Count: 17, Neg. LLF: -660.8612062282516
Iteration: 3, Func. Count: 25, Neg. LLF: -660.861611824671
Iteration: 4, Func. Count: 33, Neg. LLF: -660.8667693345633
Iteration: 5, Func. Count: 42, Neg. LLF: -660.8693858824538
Iteration: 6, Func. Count: 49, Neg. LLF: -660.9072184098063
Iteration: 7, Func. Count: 58, Neg. LLF: -660.9072449966604
Iteration: 8, Func. Count: 63, Neg. LLF: -660.9072733124492
Optimization terminated successfully. (Exit mode 0)
Current function value: -660.9072733124756
Iterations: 8
Function evaluations: 63
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -660.1572169625075
Iteration: 2, Func. Count: 17, Neg. LLF: -660.2106211711047
Iteration: 3, Func. Count: 25, Neg. LLF: -660.210988626591
Iteration: 4, Func. Count: 33, Neg. LLF: -660.216770861872
Iteration: 5, Func. Count: 42, Neg. LLF: -660.2203082867882
Iteration: 6, Func. Count: 49, Neg. LLF: -660.255652517247
Iteration: 7, Func. Count: 57, Neg. LLF: -660.2556607954765
Iteration: 8, Func. Count: 62, Neg. LLF: -660.2556975852559
Optimization terminated successfully. (Exit mode 0)
Current function value: -660.2556975852802
Iterations: 8
Function evaluations: 62
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -660.3320532461248
Iteration: 2, Func. Count: 17, Neg. LLF: -660.3798514557893
Iteration: 3, Func. Count: 25, Neg. LLF: -660.3883133013835
Iteration: 4, Func. Count: 32, Neg. LLF: -660.4141464890415
Iteration: 5, Func. Count: 41, Neg. LLF: -660.4141476923797
Iteration: 6, Func. Count: 46, Neg. LLF: -660.4141825416148
Optimization terminated successfully. (Exit mode 0)
Current function value: -660.4141825416422
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -660.6231341153925
Iteration: 2, Func. Count: 17, Neg. LLF: -660.6727945035133
Iteration: 3, Func. Count: 25, Neg. LLF: -660.680662287278
Iteration: 4, Func. Count: 32, Neg. LLF: -660.6894251618182
Iteration: 5, Func. Count: 39, Neg. LLF: -660.7063410006107
Iteration: 6, Func. Count: 47, Neg. LLF: -660.7064102343022
Iteration: 7, Func. Count: 52, Neg. LLF: -660.7064132307494
Optimization terminated successfully. (Exit mode 0)
Current function value: -660.7064132306159
Iterations: 7
Function evaluations: 63
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -660.0493624855969
Iteration: 2, Func. Count: 17, Neg. LLF: -660.0931453024308
Iteration: 3, Func. Count: 26, Neg. LLF: -660.0932014960817
Iteration: 4, Func. Count: 38, Neg. LLF: -660.0932601130977
Iteration: 5, Func. Count: 45, Neg. LLF: -660.0983403396945
Iteration: 6, Func. Count: 55, Neg. LLF: -660.0985312738595
Iteration: 7, Func. Count: 64, Neg. LLF: -660.1032107134483
Iteration: 8, Func. Count: 76, Neg. LLF: -660.106772968092
Iteration: 9, Func. Count: 84, Neg. LLF: -660.1163705402789
Iteration: 10, Func. Count: 91, Neg. LLF: -660.1205298917457
Iteration: 11, Func. Count: 98, Neg. LLF: -660.1220507108571
Iteration: 12, Func. Count: 103, Neg. LLF: -660.1229598646087
Iteration: 13, Func. Count: 110, Neg. LLF: -660.1229622918388
Optimization terminated successfully. (Exit mode 0)
Current function value: -660.1229622917242
Iterations: 15
Function evaluations: 110
Gradient evaluations: 13
Iteration: 1, Func. Count: 5, Neg. LLF: -660.1985093070596
Iteration: 2, Func. Count: 17, Neg. LLF: -660.2395942810831
Iteration: 3, Func. Count: 24, Neg. LLF: -660.2400789573148
Iteration: 4, Func. Count: 36, Neg. LLF: -660.2401139372516
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -660.2401139134072
Iterations: 8
Function evaluations: 36
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -659.4560195330346
Iteration: 2, Func. Count: 17, Neg. LLF: -659.4997416573165
Iteration: 3, Func. Count: 26, Neg. LLF: -659.5009224971657
Iteration: 4, Func. Count: 35, Neg. LLF: -659.504412086683
Iteration: 5, Func. Count: 44, Neg. LLF: -659.5079353134158
Iteration: 6, Func. Count: 56, Neg. LLF: -659.5088849114567
Iteration: 7, Func. Count: 64, Neg. LLF: -659.5089982893812
Iteration: 8, Func. Count: 73, Neg. LLF: -659.5106710282314
Iteration: 9, Func. Count: 80, Neg. LLF: -659.5139590356489
Iteration: 10, Func. Count: 87, Neg. LLF: -659.5146103200287
Iteration: 11, Func. Count: 94, Neg. LLF: -659.5182142979273
Iteration: 12, Func. Count: 101, Neg. LLF: -659.5184419400191
Iteration: 13, Func. Count: 106, Neg. LLF: -659.5186368903434
Iteration: 14, Func. Count: 111, Neg. LLF: -659.5186735433208
Optimization terminated successfully. (Exit mode 0)
Current function value: -659.518673543316
Iterations: 16
Function evaluations: 111
Gradient evaluations: 14
Iteration: 1, Func. Count: 5, Neg. LLF: -659.3930773033558
Iteration: 2, Func. Count: 17, Neg. LLF: -659.435800469605
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -659.435800449866
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -659.5776210689435
Iteration: 2, Func. Count: 17, Neg. LLF: -659.6405081058016
Iteration: 3, Func. Count: 25, Neg. LLF: -659.6498544451362
Iteration: 4, Func. Count: 32, Neg. LLF: -659.6668815752219
Iteration: 5, Func. Count: 40, Neg. LLF: -659.6673872894473
Iteration: 6, Func. Count: 46, Neg. LLF: -659.6679996187197
Iteration: 7, Func. Count: 51, Neg. LLF: -659.6680040415179
Optimization terminated successfully. (Exit mode 0)
Current function value: -659.6680040415159
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -660.94979376385
Iteration: 2, Func. Count: 17, Neg. LLF: -661.0129741055507
Iteration: 3, Func. Count: 25, Neg. LLF: -661.0216044556266
Iteration: 4, Func. Count: 32, Neg. LLF: -661.0262543460963
Iteration: 5, Func. Count: 39, Neg. LLF: -661.0361382103742
Iteration: 6, Func. Count: 48, Neg. LLF: -661.0361421413608
Iteration: 7, Func. Count: 53, Neg. LLF: -661.0361575095683
Optimization terminated successfully. (Exit mode 0)
Current function value: -661.0361575095288
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -659.8837409512778
Iteration: 2, Func. Count: 17, Neg. LLF: -659.9502072252587
Optimization terminated successfully. (Exit mode 0)
Current function value: -659.9502072897734
Iterations: 2
Function evaluations: 24
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -659.5339019562297
Iteration: 2, Func. Count: 17, Neg. LLF: -659.5973975488157
Optimization terminated successfully. (Exit mode 0)
Current function value: -659.5973984699548
Iterations: 2
Function evaluations: 23
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -659.1988066725972
Iteration: 2, Func. Count: 17, Neg. LLF: -659.2607100999071
Optimization terminated successfully. (Exit mode 0)
Current function value: -659.2607100753769
Iterations: 2
Function evaluations: 26
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -655.8021559108395
Iteration: 2, Func. Count: 17, Neg. LLF: -655.8472654895512
Iteration: 3, Func. Count: 25, Neg. LLF: -655.8507894489
Iteration: 4, Func. Count: 37, Neg. LLF: -655.8603993662937
Iteration: 5, Func. Count: 45, Neg. LLF: -655.8661742425697
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.8661743672712
Iterations: 7
Function evaluations: 53
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -655.0436978709222
Iteration: 2, Func. Count: 17, Neg. LLF: -655.0696426843193
Iteration: 3, Func. Count: 26, Neg. LLF: -655.0697052161916
Iteration: 4, Func. Count: 35, Neg. LLF: -655.0711767067841
Iteration: 5, Func. Count: 43, Neg. LLF: -655.0828128433426
Iteration: 6, Func. Count: 50, Neg. LLF: -655.0911165812582
Iteration: 7, Func. Count: 58, Neg. LLF: -655.0911328286559
Iteration: 8, Func. Count: 64, Neg. LLF: -655.0925077051585
Iteration: 9, Func. Count: 69, Neg. LLF: -655.0925104495407
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.0925104495443
Iterations: 9
Function evaluations: 69
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -655.8378448819667
Iteration: 2, Func. Count: 17, Neg. LLF: -655.9219989316725
Iteration: 3, Func. Count: 26, Neg. LLF: -655.9220414887947
Iteration: 4, Func. Count: 35, Neg. LLF: -655.9228590820528
Iteration: 5, Func. Count: 44, Neg. LLF: -655.925783667435
Iteration: 6, Func. Count: 56, Neg. LLF: -655.9264987625086
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.9264996647587
Iterations: 7
Function evaluations: 62
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -657.6172047154266
Iteration: 2, Func. Count: 17, Neg. LLF: -657.6876351008295
Iteration: 3, Func. Count: 26, Neg. LLF: -657.6902978981756
Iteration: 4, Func. Count: 38, Neg. LLF: -657.6928567215408
Iteration: 5, Func. Count: 46, Neg. LLF: -657.7021169668074
Iteration: 6, Func. Count: 55, Neg. LLF: -657.7205035925924
Iteration: 7, Func. Count: 63, Neg. LLF: -657.7315250892177
Iteration: 8, Func. Count: 72, Neg. LLF: -657.7330213361397
Iteration: 9, Func. Count: 79, Neg. LLF: -657.7341256243355
Iteration: 10, Func. Count: 86, Neg. LLF: -657.7342081525653
Iteration: 11, Func. Count: 91, Neg. LLF: -657.7342209027067
Optimization terminated successfully. (Exit mode 0)
Current function value: -657.7342209027092
Iterations: 12
Function evaluations: 91
Gradient evaluations: 11
Iteration: 1, Func. Count: 5, Neg. LLF: -655.9892348458227
Iteration: 2, Func. Count: 17, Neg. LLF: -656.0590018942496
Iteration: 3, Func. Count: 26, Neg. LLF: -656.0642219327943
Iteration: 4, Func. Count: 38, Neg. LLF: -656.0681600579908
Iteration: 5, Func. Count: 46, Neg. LLF: -656.0774570732628
Iteration: 6, Func. Count: 55, Neg. LLF: -656.0790705839521
Iteration: 7, Func. Count: 62, Neg. LLF: -656.0979807504824
Iteration: 8, Func. Count: 71, Neg. LLF: -656.0979892332143
Iteration: 9, Func. Count: 76, Neg. LLF: -656.0980105987001
Optimization terminated successfully. (Exit mode 0)
Current function value: -656.0980105987187
Iterations: 10
Function evaluations: 76
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -655.9042540899916
Iteration: 2, Func. Count: 17, Neg. LLF: -655.9721247197159
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.9721247101422
Iterations: 3
Function evaluations: 26
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -654.4405482551299
Iteration: 2, Func. Count: 17, Neg. LLF: -654.4851644066307
Iteration: 3, Func. Count: 25, Neg. LLF: -654.4894933490531
Iteration: 4, Func. Count: 32, Neg. LLF: -654.5311277541294
Iteration: 5, Func. Count: 41, Neg. LLF: -654.5312516979479
Iteration: 6, Func. Count: 46, Neg. LLF: -654.5312592957482
Optimization terminated successfully. (Exit mode 0)
Current function value: -654.5312592957519
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -654.5182585181257
Iteration: 2, Func. Count: 17, Neg. LLF: -654.5657937069122
Iteration: 3, Func. Count: 25, Neg. LLF: -654.5663402450866
Iteration: 4, Func. Count: 37, Neg. LLF: -654.5668412610971
Iteration: 5, Func. Count: 44, Neg. LLF: -654.5727006585873
Iteration: 6, Func. Count: 55, Neg. LLF: -654.5727028954988
Iteration: 7, Func. Count: 67, Neg. LLF: -654.5727077336569
Optimization terminated successfully. (Exit mode 0)
Current function value: -654.5727077598408
Iterations: 9
Function evaluations: 74
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -651.1112329638283
Iteration: 2, Func. Count: 17, Neg. LLF: -651.1331228514488
Iteration: 3, Func. Count: 25, Neg. LLF: -651.1341130783869
Iteration: 4, Func. Count: 32, Neg. LLF: -651.152273129842
Iteration: 5, Func. Count: 40, Neg. LLF: -651.152648565452
Iteration: 6, Func. Count: 48, Neg. LLF: -651.153704766913
Iteration: 7, Func. Count: 53, Neg. LLF: -651.1540469025593
Iteration: 8, Func. Count: 58, Neg. LLF: -651.1540645421729
Optimization terminated successfully. (Exit mode 0)
Current function value: -651.1540645421821
Iterations: 8
Function evaluations: 58
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -649.204720775396
Iteration: 2, Func. Count: 17, Neg. LLF: -649.2698276097943
Optimization terminated successfully. (Exit mode 0)
Current function value: -649.2698280637319
Iterations: 2
Function evaluations: 23
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -647.8648840508957
Iteration: 2, Func. Count: 17, Neg. LLF: -647.9344547181375
Iteration: 3, Func. Count: 25, Neg. LLF: -647.9419209023314
Iteration: 4, Func. Count: 32, Neg. LLF: -647.9497555301346
Iteration: 5, Func. Count: 39, Neg. LLF: -647.9648216657733
Iteration: 6, Func. Count: 47, Neg. LLF: -647.9648476529103
Iteration: 7, Func. Count: 52, Neg. LLF: -647.9648619540646
Optimization terminated successfully. (Exit mode 0)
Current function value: -647.9648619540735
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -651.4129660627998
Iteration: 2, Func. Count: 17, Neg. LLF: -651.4575889595142
Iteration: 3, Func. Count: 28, Neg. LLF: -651.4575904571143
Optimization terminated successfully. (Exit mode 0)
Current function value: -651.4575904335206
Iterations: 5
Function evaluations: 37
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -649.3859898771539
Iteration: 2, Func. Count: 17, Neg. LLF: -649.5104122355534
Iteration: 3, Func. Count: 26, Neg. LLF: -649.5108472356878
Iteration: 4, Func. Count: 33, Neg. LLF: -649.5187741558833
Iteration: 5, Func. Count: 41, Neg. LLF: -649.5187779292498
Iteration: 6, Func. Count: 46, Neg. LLF: -649.5188578227564
Optimization terminated successfully. (Exit mode 0)
Current function value: -649.5188578227702
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -649.4320671307988
Iteration: 2, Func. Count: 17, Neg. LLF: -649.5770448825115
Iteration: 3, Func. Count: 26, Neg. LLF: -649.577118372875
Iteration: 4, Func. Count: 33, Neg. LLF: -649.578102911356
Iteration: 5, Func. Count: 38, Neg. LLF: -649.5781516380657
Iteration: 6, Func. Count: 44, Neg. LLF: -649.578230463566
Iteration: 7, Func. Count: 49, Neg. LLF: -649.5782586700102
Optimization terminated successfully. (Exit mode 0)
Current function value: -649.5782586700732
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -649.8110083626044
Iteration: 2, Func. Count: 17, Neg. LLF: -649.9318119205432
Iteration: 3, Func. Count: 29, Neg. LLF: -649.9318146518306
Optimization terminated successfully. (Exit mode 0)
Current function value: -649.9318150977472
Iterations: 5
Function evaluations: 37
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -650.805578132557
Iteration: 2, Func. Count: 17, Neg. LLF: -650.9326486836682
Iteration: 3, Func. Count: 25, Neg. LLF: -650.937820495724
Iteration: 4, Func. Count: 33, Neg. LLF: -650.9384003660639
Iteration: 5, Func. Count: 39, Neg. LLF: -650.9419266747137
Iteration: 6, Func. Count: 48, Neg. LLF: -650.9419372375771
Iteration: 7, Func. Count: 54, Neg. LLF: -650.946890739448
Iteration: 8, Func. Count: 59, Neg. LLF: -650.9468919094435
Optimization terminated successfully. (Exit mode 0)
Current function value: -650.9468919094534
Iterations: 8
Function evaluations: 59
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -650.4885855609117
Iteration: 2, Func. Count: 17, Neg. LLF: -650.5533213306016
Iteration: 3, Func. Count: 26, Neg. LLF: -650.553568046112
Iteration: 4, Func. Count: 33, Neg. LLF: -650.5539672212103
Iteration: 5, Func. Count: 38, Neg. LLF: -650.554126079099
Iteration: 6, Func. Count: 44, Neg. LLF: -650.5543007638967
Iteration: 7, Func. Count: 49, Neg. LLF: -650.5543643864472
Optimization terminated successfully. (Exit mode 0)
Current function value: -650.5543643865449
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -655.3122475582609
Iteration: 2, Func. Count: 17, Neg. LLF: -655.3419618895005
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -655.3419618740463
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -652.2331023640592
Iteration: 2, Func. Count: 17, Neg. LLF: -652.2380165608331
Iteration: 3, Func. Count: 26, Neg. LLF: -652.2380535425148
Iteration: 4, Func. Count: 33, Neg. LLF: -652.2387175499433
Iteration: 5, Func. Count: 40, Neg. LLF: -652.238729241707
Iteration: 6, Func. Count: 46, Neg. LLF: -652.239046132257
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.2390461321772
Iterations: 6
Function evaluations: 57
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -651.9119785494333
Iteration: 2, Func. Count: 17, Neg. LLF: -651.9179554252898
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -651.9179554111367
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -651.9881063656178
Iteration: 2, Func. Count: 17, Neg. LLF: -652.0078668093067
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -652.0078667972509
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -653.2479412139303
Iteration: 2, Func. Count: 17, Neg. LLF: -653.2632103846931
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -653.2632103760111
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -653.3754468787081
Iteration: 2, Func. Count: 17, Neg. LLF: -653.3913995315455
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -653.3913995160278
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -653.2146230034845
Iteration: 2, Func. Count: 17, Neg. LLF: -653.2346889871287
Iteration: 3, Func. Count: 24, Neg. LLF: -653.251770072003
Iteration: 4, Func. Count: 33, Neg. LLF: -653.25177366973
Iteration: 5, Func. Count: 41, Neg. LLF: -653.2524165116581
Iteration: 6, Func. Count: 49, Neg. LLF: -653.2524192578003
Optimization terminated successfully. (Exit mode 0)
Current function value: -653.2524193428462
Iterations: 6
Function evaluations: 53
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -652.9106837116921
Iteration: 2, Func. Count: 17, Neg. LLF: -652.9343226959288
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -652.9343226912692
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -652.7491939350816
Iteration: 2, Func. Count: 17, Neg. LLF: -652.7731091885528
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -652.7731091827466
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -651.6128800720891
Iteration: 2, Func. Count: 17, Neg. LLF: -651.6266625389292
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -651.6266625276852
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -651.4573029624041
Iteration: 2, Func. Count: 17, Neg. LLF: -651.4789576954627
Iteration: 3, Func. Count: 24, Neg. LLF: -651.4830253536446
Iteration: 4, Func. Count: 32, Neg. LLF: -651.4834822860888
Iteration: 5, Func. Count: 40, Neg. LLF: -651.483621980308
Iteration: 6, Func. Count: 46, Neg. LLF: -651.4836478179772
Iteration: 7, Func. Count: 52, Neg. LLF: -651.4837075490652
Optimization terminated successfully. (Exit mode 0)
Current function value: -651.4837075491434
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -651.9728298700384
Iteration: 2, Func. Count: 17, Neg. LLF: -651.9937135975722
Iteration: 3, Func. Count: 24, Neg. LLF: -652.0000857242619
Iteration: 4, Func. Count: 32, Neg. LLF: -652.0003187753914
Iteration: 5, Func. Count: 41, Neg. LLF: -652.0005319126124
Iteration: 6, Func. Count: 48, Neg. LLF: -652.0005513840471
Iteration: 7, Func. Count: 53, Neg. LLF: -652.0005701435123
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.0005701434546
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -651.9101764278967
Iteration: 2, Func. Count: 17, Neg. LLF: -651.9294112991453
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -651.9294112912785
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -651.7902079282007
Iteration: 2, Func. Count: 17, Neg. LLF: -651.8081029285551
Iteration: 3, Func. Count: 24, Neg. LLF: -651.8133226053449
Iteration: 4, Func. Count: 32, Neg. LLF: -651.8137059702638
Iteration: 5, Func. Count: 40, Neg. LLF: -651.8138693179117
Iteration: 6, Func. Count: 47, Neg. LLF: -651.8138771394828
Optimization terminated successfully. (Exit mode 0)
Current function value: -651.8138773970876
Iterations: 6
Function evaluations: 50
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -652.1031474216044
Iteration: 2, Func. Count: 17, Neg. LLF: -652.1222280746268
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -652.1222280682235
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -652.0268881555879
Iteration: 2, Func. Count: 17, Neg. LLF: -652.0474215800714
Iteration: 3, Func. Count: 28, Neg. LLF: -652.0474237076453
Iteration: 4, Func. Count: 35, Neg. LLF: -652.0527320519609
Iteration: 5, Func. Count: 43, Neg. LLF: -652.0532060313622
Iteration: 6, Func. Count: 51, Neg. LLF: -652.0533784849108
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.0533787118105
Iterations: 7
Function evaluations: 56
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -652.0785116050292
Iteration: 2, Func. Count: 17, Neg. LLF: -652.097662519341
Iteration: 3, Func. Count: 24, Neg. LLF: -652.1061183056985
Iteration: 4, Func. Count: 32, Neg. LLF: -652.1062797868674
Iteration: 5, Func. Count: 41, Neg. LLF: -652.1064003039751
Iteration: 6, Func. Count: 47, Neg. LLF: -652.1064348042253
Iteration: 7, Func. Count: 52, Neg. LLF: -652.1064420793629
Iteration: 8, Func. Count: 57, Neg. LLF: -652.1064461653597
Optimization terminated successfully. (Exit mode 0)
Current function value: -652.1064461653598
Iterations: 8
Function evaluations: 57
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -652.3388815364298
Iteration: 2, Func. Count: 17, Neg. LLF: -652.3567212511939
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -652.3567212418902
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -652.5722148391625
Iteration: 2, Func. Count: 17, Neg. LLF: -652.5956206231187
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -652.5956206053556
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -653.2962572723194
Iteration: 2, Func. Count: 17, Neg. LLF: -653.3092197733765
Iteration: 3, Func. Count: 24, Neg. LLF: -653.3367719909941
Iteration: 4, Func. Count: 33, Neg. LLF: -653.3367733334817
Iteration: 5, Func. Count: 42, Neg. LLF: -653.3368380069531
Optimization terminated successfully. (Exit mode 0)
Current function value: -653.3368380068146
Iterations: 5
Function evaluations: 53
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -655.8052338066152
Iteration: 2, Func. Count: 17, Neg. LLF: -655.8200536396405
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -655.8200536238257
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -655.4006152295103
Iteration: 2, Func. Count: 17, Neg. LLF: -655.4156193901697
Iteration: 3, Func. Count: 27, Neg. LLF: -655.4156223105838
Iteration: 4, Func. Count: 34, Neg. LLF: -655.4398831085227
Iteration: 5, Func. Count: 42, Neg. LLF: -655.4404771956533
Iteration: 6, Func. Count: 50, Neg. LLF: -655.4422133819959
Iteration: 7, Func. Count: 58, Neg. LLF: -655.4428648677504
Iteration: 8, Func. Count: 67, Neg. LLF: -655.4428691720193
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.4428691719502
Iterations: 9
Function evaluations: 78
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -655.2744307548804
Iteration: 2, Func. Count: 17, Neg. LLF: -655.2917876585584
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -655.2917876488714
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -655.2362656801872
Iteration: 2, Func. Count: 17, Neg. LLF: -655.2532753665489
Iteration: 3, Func. Count: 24, Neg. LLF: -655.2614210283616
Iteration: 4, Func. Count: 32, Neg. LLF: -655.2665218170839
Iteration: 5, Func. Count: 39, Neg. LLF: -655.2678565708708
Iteration: 6, Func. Count: 46, Neg. LLF: -655.2704850000478
Iteration: 7, Func. Count: 55, Neg. LLF: -655.2707097769617
Iteration: 8, Func. Count: 60, Neg. LLF: -655.2707690764531
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.2707690764853
Iterations: 8
Function evaluations: 60
Gradient evaluations: 8
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -655.03201718895
Iteration: 2, Func. Count: 17, Neg. LLF: -655.0537521464796
Iteration: 3, Func. Count: 25, Neg. LLF: -655.0543659924253
Iteration: 4, Func. Count: 37, Neg. LLF: -655.055552137881
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -655.0555521256433
Iterations: 8
Function evaluations: 37
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -655.3738071573721
Iteration: 2, Func. Count: 17, Neg. LLF: -655.3985885775953
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -655.3985885654639
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -656.0420970992549
Iteration: 2, Func. Count: 17, Neg. LLF: -656.0946193286059
Iteration: 3, Func. Count: 26, Neg. LLF: -656.0952327832522
Iteration: 4, Func. Count: 34, Neg. LLF: -656.0953178742343
Iteration: 5, Func. Count: 42, Neg. LLF: -656.0954637671887
Optimization terminated successfully. (Exit mode 0)
Current function value: -656.0954637671445
Iterations: 5
Function evaluations: 53
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -657.9945323772657
Iteration: 2, Func. Count: 17, Neg. LLF: -658.0361767163117
Iteration: 3, Func. Count: 26, Neg. LLF: -658.0363444840336
Iteration: 4, Func. Count: 33, Neg. LLF: -658.0366932425923
Optimization terminated successfully. (Exit mode 0)
Current function value: -658.0366934236763
Iterations: 4
Function evaluations: 36
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -658.2903329379251
Iteration: 2, Func. Count: 17, Neg. LLF: -658.3193845270921
Optimization terminated successfully. (Exit mode 0)
Current function value: -658.3193845830797
Iterations: 3
Function evaluations: 25
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -658.2854102466508
Iteration: 2, Func. Count: 17, Neg. LLF: -658.323888844541
Iteration: 3, Func. Count: 26, Neg. LLF: -658.3240988483465
Optimization terminated successfully. (Exit mode 0)
Current function value: -658.3240995308344
Iterations: 3
Function evaluations: 31
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -658.3661780217333
Iteration: 2, Func. Count: 17, Neg. LLF: -658.4021349805414
Iteration: 3, Func. Count: 26, Neg. LLF: -658.4023065763358
Iteration: 4, Func. Count: 33, Neg. LLF: -658.4024724961039
Iteration: 5, Func. Count: 38, Neg. LLF: -658.4024898143457
Iteration: 6, Func. Count: 43, Neg. LLF: -658.4024929643662
Optimization terminated successfully. (Exit mode 0)
Current function value: -658.4024929644469
Iterations: 6
Function evaluations: 43
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -658.7144838628211
Iteration: 2, Func. Count: 17, Neg. LLF: -658.7487823327649
Iteration: 3, Func. Count: 26, Neg. LLF: -658.7488776323296
Iteration: 4, Func. Count: 33, Neg. LLF: -658.7492549191944
Optimization terminated successfully. (Exit mode 0)
Current function value: -658.7492549193221
Iterations: 4
Function evaluations: 44
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -659.053750345992
Iteration: 2, Func. Count: 17, Neg. LLF: -659.0953516775544
Iteration: 3, Func. Count: 27, Neg. LLF: -659.0953637099565
Iteration: 4, Func. Count: 34, Neg. LLF: -659.0956185735585
Optimization terminated successfully. (Exit mode 0)
Current function value: -659.0956186583256
Iterations: 4
Function evaluations: 39
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -659.4332405385485
Iteration: 2, Func. Count: 17, Neg. LLF: -659.4738981626647
Optimization terminated successfully. (Exit mode 0)
Current function value: -659.4738983057131
Iterations: 3
Function evaluations: 25
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -658.9117501684917
Iteration: 2, Func. Count: 17, Neg. LLF: -658.9441518000126
Iteration: 3, Func. Count: 25, Neg. LLF: -658.9444101264942
Iteration: 4, Func. Count: 32, Neg. LLF: -658.9446421072485
Iteration: 5, Func. Count: 40, Neg. LLF: -658.9447975242338
Iteration: 6, Func. Count: 45, Neg. LLF: -658.9448033542326
Optimization terminated successfully. (Exit mode 0)
Current function value: -658.9448033542394
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -659.1293687370014
Iteration: 2, Func. Count: 17, Neg. LLF: -659.1714111585118
Iteration: 3, Func. Count: 27, Neg. LLF: -659.1714141315772
Iteration: 4, Func. Count: 34, Neg. LLF: -659.1717400843448
Optimization terminated successfully. (Exit mode 0)
Current function value: -659.1717400841803
Iterations: 4
Function evaluations: 45
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -655.5002292168784
Iteration: 2, Func. Count: 17, Neg. LLF: -655.5301010935839
Iteration: 3, Func. Count: 24, Neg. LLF: -655.5370956326632
Iteration: 4, Func. Count: 31, Neg. LLF: -655.5497226539244
Iteration: 5, Func. Count: 43, Neg. LLF: -655.5497807099025
Iteration: 6, Func. Count: 52, Neg. LLF: -655.5502457873426
Iteration: 7, Func. Count: 59, Neg. LLF: -655.5573533110353
Iteration: 8, Func. Count: 68, Neg. LLF: -655.5573584744042
Iteration: 9, Func. Count: 73, Neg. LLF: -655.5574335850843
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.5574335851247
Iterations: 10
Function evaluations: 73
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -655.0209781750121
Iteration: 2, Func. Count: 17, Neg. LLF: -655.0366500136824
Iteration: 3, Func. Count: 28, Neg. LLF: -655.0366514780753
Iteration: 4, Func. Count: 35, Neg. LLF: -655.0381261162834
Iteration: 5, Func. Count: 43, Neg. LLF: -655.0450870656562
Iteration: 6, Func. Count: 51, Neg. LLF: -655.0458124524422
Iteration: 7, Func. Count: 59, Neg. LLF: -655.0460008339103
Optimization terminated successfully. (Exit mode 0)
Current function value: -655.0460008342527
Iterations: 8
Function evaluations: 70
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -651.64322757429
Iteration: 2, Func. Count: 17, Neg. LLF: -651.6480059948183
Iteration: 3, Func. Count: 24, Neg. LLF: -651.6550300532737
Optimization terminated successfully. (Exit mode 0)
Current function value: -651.6550300515707
Iterations: 4
Function evaluations: 32
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -651.5965856859088
Iteration: 2, Func. Count: 17, Neg. LLF: -651.6101277101784
Iteration: 3, Func. Count: 24, Neg. LLF: -651.6112526561149
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -651.6112526572997
Iterations: 7
Function evaluations: 24
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -651.1797373493579
Iteration: 2, Func. Count: 17, Neg. LLF: -651.1988779969261
Iteration: 3, Func. Count: 24, Neg. LLF: -651.2008700530779
Iteration: 4, Func. Count: 32, Neg. LLF: -651.2010372933778
Iteration: 5, Func. Count: 40, Neg. LLF: -651.201055523956
Optimization terminated successfully. (Exit mode 0)
Current function value: -651.2010555240336
Iterations: 5
Function evaluations: 51
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -647.6235697185081
Iteration: 2, Func. Count: 17, Neg. LLF: -647.6632070757496
Iteration: 3, Func. Count: 25, Neg. LLF: -647.6842678147659
Iteration: 4, Func. Count: 33, Neg. LLF: -647.6938941749329
Iteration: 5, Func. Count: 40, Neg. LLF: -647.6943764800499
Iteration: 6, Func. Count: 48, Neg. LLF: -647.6949751789718
Iteration: 7, Func. Count: 54, Neg. LLF: -647.6949763422734
Optimization terminated successfully. (Exit mode 0)
Current function value: -647.6949763444311
Iterations: 7
Function evaluations: 61
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -645.9225238385253
Iteration: 2, Func. Count: 17, Neg. LLF: -645.9243324753269
Iteration: 3, Func. Count: 26, Neg. LLF: -645.9256035786268
Iteration: 4, Func. Count: 33, Neg. LLF: -645.9481950191794
Iteration: 5, Func. Count: 41, Neg. LLF: -645.9482146213302
Iteration: 6, Func. Count: 46, Neg. LLF: -645.9482643821545
Optimization terminated successfully. (Exit mode 0)
Current function value: -645.9482643821896
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -646.3110712161359
Iteration: 2, Func. Count: 17, Neg. LLF: -646.3133044474647
Iteration: 3, Func. Count: 25, Neg. LLF: -646.3222518260402
Iteration: 4, Func. Count: 33, Neg. LLF: -646.3222590802343
Iteration: 5, Func. Count: 41, Neg. LLF: -646.3223863471537
Iteration: 6, Func. Count: 47, Neg. LLF: -646.3224046769883
Optimization terminated successfully. (Exit mode 0)
Current function value: -646.3224046770099
Iterations: 6
Function evaluations: 47
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -647.7452624518749
Iteration: 2, Func. Count: 17, Neg. LLF: -647.7483321343068
Iteration: 3, Func. Count: 26, Neg. LLF: -647.7492118246098
Iteration: 4, Func. Count: 35, Neg. LLF: -647.7534234759319
Iteration: 5, Func. Count: 43, Neg. LLF: -647.7583643528217
Iteration: 6, Func. Count: 50, Neg. LLF: -647.7588405191597
Iteration: 7, Func. Count: 58, Neg. LLF: -647.7588977307005
Iteration: 8, Func. Count: 63, Neg. LLF: -647.7588988675345
Optimization terminated successfully. (Exit mode 0)
Current function value: -647.7588988675295
Iterations: 9
Function evaluations: 63
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -648.278037659871
Iteration: 2, Func. Count: 17, Neg. LLF: -648.2799957614393
Iteration: 3, Func. Count: 25, Neg. LLF: -648.2800109704676
Optimization terminated successfully. (Exit mode 0)
Current function value: -648.2800111469805
Iterations: 4
Function evaluations: 33
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -648.5504949740225
Iteration: 2, Func. Count: 17, Neg. LLF: -648.5536843246408
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -648.5536843160988
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -653.2509211854706
Iteration: 2, Func. Count: 17, Neg. LLF: -653.2558499485351
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -653.2558499421586
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -654.536566876011
Iteration: 2, Func. Count: 17, Neg. LLF: -654.5393856749245
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -654.5393856675594
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -654.6897824275787
Iteration: 2, Func. Count: 17, Neg. LLF: -654.6920530482637
Iteration: 3, Func. Count: 26, Neg. LLF: -654.6920829994333
Iteration: 4, Func. Count: 38, Neg. LLF: -654.6921367667619
Iteration: 5, Func. Count: 45, Neg. LLF: -654.6986698999732
Iteration: 6, Func. Count: 53, Neg. LLF: -654.6991921213526
Iteration: 7, Func. Count: 61, Neg. LLF: -654.6992179175619
Iteration: 8, Func. Count: 66, Neg. LLF: -654.6992263149348
Optimization terminated successfully. (Exit mode 0)
Current function value: -654.699226314937
Iterations: 10
Function evaluations: 66
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -653.7227659602012
Iteration: 2, Func. Count: 17, Neg. LLF: -653.7232242457367
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -653.7232242342914
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -654.9317474045911
Iteration: 2, Func. Count: 17, Neg. LLF: -654.9318094757701
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -654.9318094720564
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -649.5955054056274
Iteration: 2, Func. Count: 17, Neg. LLF: -649.5957472913115
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -649.5957473046551
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -648.706694870179
Iteration: 2, Func. Count: 17, Neg. LLF: -648.7081914995907
Iteration: 3, Func. Count: 25, Neg. LLF: -648.7103909275115
Iteration: 4, Func. Count: 34, Neg. LLF: -648.7128836571916
Iteration: 5, Func. Count: 43, Neg. LLF: -648.7131804718654
Iteration: 6, Func. Count: 55, Neg. LLF: -648.7133401305284
Iteration: 7, Func. Count: 62, Neg. LLF: -648.7472189526981
Iteration: 8, Func. Count: 69, Neg. LLF: -648.7483145790804
Iteration: 9, Func. Count: 76, Neg. LLF: -648.7490772111452
Iteration: 10, Func. Count: 83, Neg. LLF: -648.7498020518681
Iteration: 11, Func. Count: 91, Neg. LLF: -648.749948469527
Iteration: 12, Func. Count: 96, Neg. LLF: -648.7510147231403
Optimization terminated successfully. (Exit mode 0)
Current function value: -648.7510149964098
Iterations: 13
Function evaluations: 98
Gradient evaluations: 12
Iteration: 1, Func. Count: 5, Neg. LLF: -647.547892835146
Iteration: 2, Func. Count: 17, Neg. LLF: -647.5494334959599
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -647.549433507451
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -645.3655152593726
Iteration: 2, Func. Count: 17, Neg. LLF: -645.3709678126799
Iteration: 3, Func. Count: 24, Neg. LLF: -645.3762491110248
Iteration: 4, Func. Count: 31, Neg. LLF: -645.3769711546861
Iteration: 5, Func. Count: 39, Neg. LLF: -645.3769922223228
Iteration: 6, Func. Count: 46, Neg. LLF: -645.3782199941111
Optimization terminated successfully. (Exit mode 0)
Current function value: -645.3782199940802
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -644.6361114695692
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -644.6361114584888
Iterations: 5
Function evaluations: 5
Gradient evaluations: 1
Iteration: 1, Func. Count: 5, Neg. LLF: -644.9282932728665
Iteration: 2, Func. Count: 17, Neg. LLF: -644.9298614267218
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -644.9298614102563
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -644.7182489904662
Iteration: 2, Func. Count: 17, Neg. LLF: -644.7191028610941
Iteration: 3, Func. Count: 24, Neg. LLF: -644.728344969979
Iteration: 4, Func. Count: 32, Neg. LLF: -644.7283686993665
Iteration: 5, Func. Count: 40, Neg. LLF: -644.7292440937474
Iteration: 6, Func. Count: 49, Neg. LLF: -644.7292478132612
Iteration: 7, Func. Count: 54, Neg. LLF: -644.7292603946341
Optimization terminated successfully. (Exit mode 0)
Current function value: -644.729260394631
Iterations: 7
Function evaluations: 54
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -644.3834691420675
Iteration: 2, Func. Count: 17, Neg. LLF: -644.3838795083591
Iteration: 3, Func. Count: 24, Neg. LLF: -644.3905490047828
Iteration: 4, Func. Count: 32, Neg. LLF: -644.3908332771667
Iteration: 5, Func. Count: 40, Neg. LLF: -644.390901212167
Iteration: 6, Func. Count: 48, Neg. LLF: -644.3910328524559
Optimization terminated successfully. (Exit mode 0)
Current function value: -644.3910329167497
Iterations: 6
Function evaluations: 51
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -643.3534196848523
Iteration: 2, Func. Count: 17, Neg. LLF: -643.3549937054609
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -643.3549936956797
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -641.8045051557739
Iteration: 2, Func. Count: 17, Neg. LLF: -641.8052514342066
Iteration: 3, Func. Count: 24, Neg. LLF: -641.8231068622737
Iteration: 4, Func. Count: 32, Neg. LLF: -641.8247185635499
Iteration: 5, Func. Count: 40, Neg. LLF: -641.8253164194529
Iteration: 6, Func. Count: 48, Neg. LLF: -641.8254213180362
Iteration: 7, Func. Count: 54, Neg. LLF: -641.8254494247323
Iteration: 8, Func. Count: 59, Neg. LLF: -641.8254794745721
Optimization terminated successfully. (Exit mode 0)
Current function value: -641.825479640031
Iterations: 8
Function evaluations: 62
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -641.4260125812044
Iteration: 2, Func. Count: 17, Neg. LLF: -641.4274149381478
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -641.4274149312719
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -641.36145703376
Iteration: 2, Func. Count: 17, Neg. LLF: -641.3655337677462
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -641.3655337533403
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -640.0420003179918
Iteration: 2, Func. Count: 17, Neg. LLF: -640.0432927410393
Iteration: 3, Func. Count: 24, Neg. LLF: -640.0563798692112
Optimization terminated successfully. (Exit mode 0)
Current function value: -640.0563800808305
Iterations: 4
Function evaluations: 32
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -639.7720009552286
Iteration: 2, Func. Count: 17, Neg. LLF: -639.7759613958779
Iteration: 3, Func. Count: 24, Neg. LLF: -639.784401601203
Iteration: 4, Func. Count: 32, Neg. LLF: -639.7846201313985
Iteration: 5, Func. Count: 40, Neg. LLF: -639.7846883274979
Optimization terminated successfully. (Exit mode 0)
Current function value: -639.7846883274617
Iterations: 5
Function evaluations: 51
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -639.651143146516
Iteration: 2, Func. Count: 17, Neg. LLF: -639.653846016274
Iteration: 3, Func. Count: 24, Neg. LLF: -639.6563147244144
Iteration: 4, Func. Count: 32, Neg. LLF: -639.6573407895955
Iteration: 5, Func. Count: 40, Neg. LLF: -639.6573451943798
Optimization terminated successfully. (Exit mode 0)
Current function value: -639.6573452047733
Iterations: 5
Function evaluations: 47
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -637.332398527238
Iteration: 2, Func. Count: 17, Neg. LLF: -637.3324503045574
Iteration: 3, Func. Count: 25, Neg. LLF: -637.3373648399383
Iteration: 4, Func. Count: 33, Neg. LLF: -637.3374931730359
Iteration: 5, Func. Count: 41, Neg. LLF: -637.3375674201914
Optimization terminated successfully. (Exit mode 0)
Current function value: -637.337567436657
Iterations: 5
Function evaluations: 45
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -637.1441545330567
Iteration: 2, Func. Count: 17, Neg. LLF: -637.1487153831902
Iteration: 3, Func. Count: 24, Neg. LLF: -637.1570643582152
Optimization terminated successfully. (Exit mode 0)
Current function value: -637.1570644318509
Iterations: 4
Function evaluations: 32
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -636.3366484480097
Iteration: 2, Func. Count: 17, Neg. LLF: -636.3378975232313
Iteration: 3, Func. Count: 24, Neg. LLF: -636.3403873306422
Iteration: 4, Func. Count: 31, Neg. LLF: -636.3449545864639
Iteration: 5, Func. Count: 38, Neg. LLF: -636.3473142962232
Iteration: 6, Func. Count: 47, Neg. LLF: -636.3473216341422
Iteration: 7, Func. Count: 53, Neg. LLF: -636.3473867471876
Optimization terminated successfully. (Exit mode 0)
Current function value: -636.3473873509889
Iterations: 7
Function evaluations: 55
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -635.0844082999154
Iteration: 2, Func. Count: 17, Neg. LLF: -635.0850892170724
Iteration: 3, Func. Count: 24, Neg. LLF: -635.105758633958
Iteration: 4, Func. Count: 31, Neg. LLF: -635.1092813772686
Iteration: 5, Func. Count: 38, Neg. LLF: -635.1102760705555
Iteration: 6, Func. Count: 45, Neg. LLF: -635.111352875567
Iteration: 7, Func. Count: 53, Neg. LLF: -635.1114579043335
Iteration: 8, Func. Count: 58, Neg. LLF: -635.1116038415599
Optimization terminated successfully. (Exit mode 0)
Current function value: -635.1116038415952
Iterations: 8
Function evaluations: 58
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -635.083027066376
Iteration: 2, Func. Count: 17, Neg. LLF: -635.0832440338172
Iteration: 3, Func. Count: 25, Neg. LLF: -635.1057613735877
Iteration: 4, Func. Count: 34, Neg. LLF: -635.1108890332082
Iteration: 5, Func. Count: 41, Neg. LLF: -635.114250057241
Iteration: 6, Func. Count: 49, Neg. LLF: -635.1143961680643
Iteration: 7, Func. Count: 54, Neg. LLF: -635.114400325438
Optimization terminated successfully. (Exit mode 0)
Current function value: -635.1144003253755
Iterations: 8
Function evaluations: 54
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -633.6929540255923
Iteration: 2, Func. Count: 17, Neg. LLF: -633.6982106324455
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -633.6982106193916
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -633.2875511236915
Iteration: 2, Func. Count: 17, Neg. LLF: -633.2885336193833
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -633.2885336157146
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -632.7657226484457
Iteration: 2, Func. Count: 17, Neg. LLF: -632.7658592199462
Iteration: 3, Func. Count: 25, Neg. LLF: -632.790041788824
Iteration: 4, Func. Count: 32, Neg. LLF: -632.7916921101548
Iteration: 5, Func. Count: 41, Neg. LLF: -632.7916991519406
Iteration: 6, Func. Count: 47, Neg. LLF: -632.7921387456245
Optimization terminated successfully. (Exit mode 0)
Current function value: -632.7921390028637
Iterations: 6
Function evaluations: 48
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -633.3053963378418
Iteration: 2, Func. Count: 17, Neg. LLF: -633.3054019900362
Iteration: 3, Func. Count: 25, Neg. LLF: -633.3240232946397
Iteration: 4, Func. Count: 34, Neg. LLF: -633.3243958454201
Iteration: 5, Func. Count: 41, Neg. LLF: -633.3267610386476
Iteration: 6, Func. Count: 49, Neg. LLF: -633.3269746791618
Iteration: 7, Func. Count: 54, Neg. LLF: -633.3269794610821
Optimization terminated successfully. (Exit mode 0)
Current function value: -633.3269794610742
Iterations: 7
Function evaluations: 54
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -633.0183098587431
Iteration: 2, Func. Count: 17, Neg. LLF: -633.0184236955015
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -633.0184236934474
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -632.0169925450577
Iteration: 2, Func. Count: 17, Neg. LLF: -632.0170137985144
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -632.0170137930724
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -632.1701246217718
Iteration: 2, Func. Count: 17, Neg. LLF: -632.170128877402
Iteration: 3, Func. Count: 25, Neg. LLF: -632.199602413486
Iteration: 4, Func. Count: 35, Neg. LLF: -632.2003296176613
Iteration: 5, Func. Count: 42, Neg. LLF: -632.2088003634108
Iteration: 6, Func. Count: 49, Neg. LLF: -632.208988521393
Iteration: 7, Func. Count: 54, Neg. LLF: -632.2089904154852
Optimization terminated successfully. (Exit mode 0)
Current function value: -632.208990415474
Iterations: 7
Function evaluations: 54
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -631.1766975773489
Iteration: 2, Func. Count: 17, Neg. LLF: -631.1767953067458
Iteration: 3, Func. Count: 24, Neg. LLF: -631.2001931810989
Iteration: 4, Func. Count: 36, Neg. LLF: -631.2002226638023
Iteration: 5, Func. Count: 44, Neg. LLF: -631.2024157001902
Iteration: 6, Func. Count: 51, Neg. LLF: -631.2048154758512
Iteration: 7, Func. Count: 59, Neg. LLF: -631.2053734653113
Iteration: 8, Func. Count: 64, Neg. LLF: -631.2053963915122
Optimization terminated successfully. (Exit mode 0)
Current function value: -631.2053963914854
Iterations: 9
Function evaluations: 64
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -631.1494563979622
Iteration: 2, Func. Count: 17, Neg. LLF: -631.1497204656339
Iteration: 3, Func. Count: 27, Neg. LLF: -631.1497277273254
Iteration: 4, Func. Count: 34, Neg. LLF: -631.1662627186747
Iteration: 5, Func. Count: 46, Neg. LLF: -631.1662936512528
Iteration: 6, Func. Count: 54, Neg. LLF: -631.1687083365185
Iteration: 7, Func. Count: 62, Neg. LLF: -631.1691310000635
Optimization terminated successfully. (Exit mode 0)
Current function value: -631.1691310001647
Iterations: 9
Function evaluations: 73
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -631.1064835779316
Iteration: 2, Func. Count: 17, Neg. LLF: -631.1066026376855
Iteration: 3, Func. Count: 24, Neg. LLF: -631.1200580050418
Iteration: 4, Func. Count: 35, Neg. LLF: -631.1200661412146
Iteration: 5, Func. Count: 43, Neg. LLF: -631.1213568558497
Iteration: 6, Func. Count: 50, Neg. LLF: -631.12217717662
Iteration: 7, Func. Count: 59, Neg. LLF: -631.1221875304666
Iteration: 8, Func. Count: 64, Neg. LLF: -631.1222844084862
Optimization terminated successfully. (Exit mode 0)
Current function value: -631.1222844084939
Iterations: 9
Function evaluations: 64
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -631.1027435083587
Iteration: 2, Func. Count: 17, Neg. LLF: -631.1028258971786
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -631.1028258795029
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -631.0981384709272
Iteration: 2, Func. Count: 17, Neg. LLF: -631.098246719801
Iteration: 3, Func. Count: 27, Neg. LLF: -631.0982495420953
Iteration: 4, Func. Count: 34, Neg. LLF: -631.1106895462657
Iteration: 5, Func. Count: 42, Neg. LLF: -631.1107571824333
Iteration: 6, Func. Count: 54, Neg. LLF: -631.1108128704013
Iteration: 7, Func. Count: 62, Neg. LLF: -631.1115372307039
Iteration: 8, Func. Count: 69, Neg. LLF: -631.1129487987826
Iteration: 9, Func. Count: 77, Neg. LLF: -631.1135546242192
Iteration: 10, Func. Count: 82, Neg. LLF: -631.1135703898319
Optimization terminated successfully. (Exit mode 0)
Current function value: -631.113570389789
Iterations: 12
Function evaluations: 82
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -630.8080856968535
Iteration: 2, Func. Count: 17, Neg. LLF: -630.8082576543034
Iteration: 3, Func. Count: 24, Neg. LLF: -630.8236734989838
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Optimization terminated successfully. (Exit mode 0)
Current function value: -630.8236738376646
Iterations: 3
Function evaluations: 30
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -631.1889016298417
Iteration: 2, Func. Count: 17, Neg. LLF: -631.1889254527389
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -631.1889254347998
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -631.3796736640834
Iteration: 2, Func. Count: 17, Neg. LLF: -631.3798538894454
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -631.3798538694407
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -631.3363126188262
Iteration: 2, Func. Count: 17, Neg. LLF: -631.3363998891903
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -631.3363998703795
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -631.3192840773195
Iteration: 2, Func. Count: 17, Neg. LLF: -631.3193561970592
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 3, Func. Count: 27, Neg. LLF: -631.3193680073587
Iteration: 4, Func. Count: 39, Neg. LLF: -631.3193800539149
Iteration: 5, Func. Count: 46, Neg. LLF: -631.3365894513129
Iteration: 6, Func. Count: 56, Neg. LLF: -631.336635934252
Iteration: 7, Func. Count: 65, Neg. LLF: -631.3378172294781
Iteration: 8, Func. Count: 72, Neg. LLF: -631.3382924456445
Iteration: 9, Func. Count: 80, Neg. LLF: -631.3385999467314
Iteration: 10, Func. Count: 86, Neg. LLF: -631.338912766653
Iteration: 11, Func. Count: 91, Neg. LLF: -631.3389137858057
Optimization terminated successfully. (Exit mode 0)
Current function value: -631.3389137858123
Iterations: 14
Function evaluations: 91
Gradient evaluations: 11
Iteration: 1, Func. Count: 5, Neg. LLF: -631.3743160797424
Iteration: 2, Func. Count: 17, Neg. LLF: -631.3745634623456
Iteration: 3, Func. Count: 24, Neg. LLF: -631.3955484815124
Iteration: 4, Func. Count: 32, Neg. LLF: -631.3980570945275
Iteration: 5, Func. Count: 40, Neg. LLF: -631.3984811601936
Optimization terminated successfully. (Exit mode 0)
Current function value: -631.3984811598775
Iterations: 5
Function evaluations: 51
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -631.753799636163
Iteration: 2, Func. Count: 17, Neg. LLF: -631.7538247699138
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -631.7538247442135
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -631.5049323555238
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -631.5049323331748
Iterations: 5
Function evaluations: 5
Gradient evaluations: 1
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -631.1877191969332
Iteration: 2, Func. Count: 17, Neg. LLF: -631.1877712245062
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -631.1877712051967
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -631.2424368407156
Iteration: 2, Func. Count: 17, Neg. LLF: -631.2429921259873
Iteration: 3, Func. Count: 24, Neg. LLF: -631.2604221669328
Optimization terminated successfully. (Exit mode 0)
Current function value: -631.260422807113
Iterations: 4
Function evaluations: 31
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -629.2841610105766
Iteration: 2, Func. Count: 17, Neg. LLF: -629.2868642227963
Iteration: 3, Func. Count: 25, Neg. LLF: -629.2884242869219
Iteration: 4, Func. Count: 32, Neg. LLF: -629.2908084612995
Iteration: 5, Func. Count: 40, Neg. LLF: -629.291532233266
Iteration: 6, Func. Count: 45, Neg. LLF: -629.29156305906
Optimization terminated successfully. (Exit mode 0)
Current function value: -629.2915630590156
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -628.7539367940682
Iteration: 2, Func. Count: 17, Neg. LLF: -628.7611341734703
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -628.7611341738361
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -625.7669671279057
Iteration: 2, Func. Count: 17, Neg. LLF: -625.7839732065154
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -625.7839732075755
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -625.316816744963
Iteration: 2, Func. Count: 17, Neg. LLF: -625.3294357871174
Iteration: 3, Func. Count: 26, Neg. LLF: -625.3294937164961
Iteration: 4, Func. Count: 33, Neg. LLF: -625.348738481056
Iteration: 5, Func. Count: 41, Neg. LLF: -625.3502669659149
Iteration: 6, Func. Count: 48, Neg. LLF: -625.3514310182597
Iteration: 7, Func. Count: 56, Neg. LLF: -625.3514808471986
Iteration: 8, Func. Count: 63, Neg. LLF: -625.3514841273964
Optimization terminated successfully. (Exit mode 0)
Current function value: -625.351484127415
Iterations: 9
Function evaluations: 63
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -624.1283989001645
Iteration: 2, Func. Count: 17, Neg. LLF: -624.1361757586794
Iteration: 3, Func. Count: 24, Neg. LLF: -624.1449305928209
Iteration: 4, Func. Count: 31, Neg. LLF: -624.147099229266
Iteration: 5, Func. Count: 40, Neg. LLF: -624.1471053258232
Iteration: 6, Func. Count: 46, Neg. LLF: -624.1471194740641
Optimization terminated successfully. (Exit mode 0)
Current function value: -624.1471194739778
Iterations: 6
Function evaluations: 57
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -623.7868051840117
Iteration: 2, Func. Count: 17, Neg. LLF: -623.791288921085
Iteration: 3, Func. Count: 24, Neg. LLF: -623.7948118373536
Iteration: 4, Func. Count: 32, Neg. LLF: -623.7957191735605
Iteration: 5, Func. Count: 40, Neg. LLF: -623.7957351869345
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.7957351932703
Iterations: 5
Function evaluations: 47
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -622.1964219535039
Iteration: 2, Func. Count: 17, Neg. LLF: -622.1995946062216
Iteration: 3, Func. Count: 26, Neg. LLF: -622.200285065157
Iteration: 4, Func. Count: 33, Neg. LLF: -622.2044898966694
Iteration: 5, Func. Count: 40, Neg. LLF: -622.2044928754078
Iteration: 6, Func. Count: 46, Neg. LLF: -622.2045343440542
Optimization terminated successfully. (Exit mode 0)
Current function value: -622.2045343440116
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -621.83574924911
Iteration: 2, Func. Count: 17, Neg. LLF: -621.8369545294975
Iteration: 3, Func. Count: 25, Neg. LLF: -621.8384519771475
Iteration: 4, Func. Count: 32, Neg. LLF: -621.8386009648697
Iteration: 5, Func. Count: 39, Neg. LLF: -621.8386039162382
Iteration: 6, Func. Count: 44, Neg. LLF: -621.8386112588537
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.8386112589016
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -620.5173923432849
Iteration: 2, Func. Count: 17, Neg. LLF: -620.5182477250921
Iteration: 3, Func. Count: 25, Neg. LLF: -620.523805509053
Iteration: 4, Func. Count: 32, Neg. LLF: -620.5250607278585
Iteration: 5, Func. Count: 39, Neg. LLF: -620.5251202512151
Iteration: 6, Func. Count: 45, Neg. LLF: -620.5251479199076
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.5251479200276
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -620.5414799846965
Iteration: 2, Func. Count: 17, Neg. LLF: -620.5416412621443
Iteration: 3, Func. Count: 25, Neg. LLF: -620.5473486820174
Iteration: 4, Func. Count: 32, Neg. LLF: -620.5517153193628
Iteration: 5, Func. Count: 40, Neg. LLF: -620.5517644958989
Iteration: 6, Func. Count: 45, Neg. LLF: -620.551765644414
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.5517656444079
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -619.8521341345429
Iteration: 2, Func. Count: 17, Neg. LLF: -619.8524813596864
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -619.8524813654523
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -619.8867598908917
Iteration: 2, Func. Count: 17, Neg. LLF: -619.8867780367439
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -619.8867780477739
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -619.7135602995018
Iteration: 2, Func. Count: 17, Neg. LLF: -619.71358694646
Optimization terminated successfully. (Exit mode 0)
Current function value: -619.7135869609012
Iterations: 3
Function evaluations: 26
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -618.1481693377227
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 2, Func. Count: 17, Neg. LLF: -618.1497398216704
Iteration: 3, Func. Count: 25, Neg. LLF: -618.1643910581954
Iteration: 4, Func. Count: 33, Neg. LLF: -618.1646253592025
Iteration: 5, Func. Count: 40, Neg. LLF: -618.1711619276008
Iteration: 6, Func. Count: 48, Neg. LLF: -618.1712917402056
Iteration: 7, Func. Count: 53, Neg. LLF: -618.1712938073151
Optimization terminated successfully. (Exit mode 0)
Current function value: -618.171293807311
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -617.9443561455694
Iteration: 2, Func. Count: 17, Neg. LLF: -617.9447454912026
Iteration: 3, Func. Count: 24, Neg. LLF: -617.9689420636778
Iteration: 4, Func. Count: 32, Neg. LLF: -617.9694018395924
Iteration: 5, Func. Count: 40, Neg. LLF: -617.9695761881535
Iteration: 6, Func. Count: 48, Neg. LLF: -617.9697959088705
Iteration: 7, Func. Count: 53, Neg. LLF: -617.9698007811934
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.9698007811876
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -618.0898553865992
Iteration: 2, Func. Count: 17, Neg. LLF: -618.0902651401127
Iteration: 3, Func. Count: 24, Neg. LLF: -618.1176142504339
Iteration: 4, Func. Count: 32, Neg. LLF: -618.1181614424542
Iteration: 5, Func. Count: 41, Neg. LLF: -618.1181627103463
Iteration: 6, Func. Count: 47, Neg. LLF: -618.1181638878286
Optimization terminated successfully. (Exit mode 0)
Current function value: -618.1181638878215
Iterations: 6
Function evaluations: 47
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -616.7688600131135
Iteration: 2, Func. Count: 17, Neg. LLF: -616.7690850226068
Iteration: 3, Func. Count: 24, Neg. LLF: -616.7775843120045
Iteration: 4, Func. Count: 31, Neg. LLF: -616.7782468719192
Optimization terminated successfully. (Exit mode 0)
Current function value: -616.7782480080208
Iterations: 5
Function evaluations: 39
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -616.2420209062542
Iteration: 2, Func. Count: 17, Neg. LLF: -616.2429843846705
Iteration: 3, Func. Count: 24, Neg. LLF: -616.251632742272
Iteration: 4, Func. Count: 32, Neg. LLF: -616.2524310314975
Iteration: 5, Func. Count: 39, Neg. LLF: -616.2526444900152
Optimization terminated successfully. (Exit mode 0)
Current function value: -616.2526444899131
Iterations: 5
Function evaluations: 50
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -616.0520867982254
Iteration: 2, Func. Count: 17, Neg. LLF: -616.0527605266221
Iteration: 3, Func. Count: 24, Neg. LLF: -616.0555008822819
Iteration: 4, Func. Count: 32, Neg. LLF: -616.056122122992
Optimization terminated successfully. (Exit mode 0)
Current function value: -616.0561221224441
Iterations: 4
Function evaluations: 43
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -616.1896268394069
Iteration: 2, Func. Count: 17, Neg. LLF: -616.1913658346032
Iteration: 3, Func. Count: 24, Neg. LLF: -616.19470372668
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -616.1947037248219
Iterations: 7
Function evaluations: 24
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -616.972918331488
Iteration: 2, Func. Count: 17, Neg. LLF: -616.9756162463152
Iteration: 3, Func. Count: 24, Neg. LLF: -616.9871311485126
Iteration: 4, Func. Count: 32, Neg. LLF: -616.9877275004594
Optimization terminated successfully. (Exit mode 0)
Current function value: -616.9877275749714
Iterations: 4
Function evaluations: 39
Gradient evaluations: 4
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -617.7828747197866
Iteration: 2, Func. Count: 17, Neg. LLF: -617.785667202962
Iteration: 3, Func. Count: 24, Neg. LLF: -617.8018628993217
Iteration: 4, Func. Count: 32, Neg. LLF: -617.8022431317634
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.8022431318269
Iterations: 4
Function evaluations: 43
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -617.7522291373377
Iteration: 2, Func. Count: 17, Neg. LLF: -617.754535272941
Iteration: 3, Func. Count: 26, Neg. LLF: -617.7545897332152
Iteration: 4, Func. Count: 38, Neg. LLF: -617.7546406486748
Iteration: 5, Func. Count: 48, Neg. LLF: -617.7546689091021
Iteration: 6, Func. Count: 55, Neg. LLF: -617.7713001462182
Iteration: 7, Func. Count: 63, Neg. LLF: -617.7716177692048
Iteration: 8, Func. Count: 72, Neg. LLF: -617.7716188069942
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.7716188070135
Iterations: 11
Function evaluations: 83
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -617.7439694255685
Iteration: 2, Func. Count: 17, Neg. LLF: -617.7462467846046
Iteration: 3, Func. Count: 24, Neg. LLF: -617.7513224192229
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -617.7513224195419
Iterations: 7
Function evaluations: 24
Gradient evaluations: 3
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -617.5093005460674
Iteration: 2, Func. Count: 17, Neg. LLF: -617.511809728026
Iteration: 3, Func. Count: 24, Neg. LLF: -617.5225743730699
Iteration: 4, Func. Count: 31, Neg. LLF: -617.5234395720261
Iteration: 5, Func. Count: 38, Neg. LLF: -617.5237752460362
Iteration: 6, Func. Count: 47, Neg. LLF: -617.5240014396607
Iteration: 7, Func. Count: 52, Neg. LLF: -617.5241517385207
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.5241517385095
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -617.2768689583211
Iteration: 2, Func. Count: 17, Neg. LLF: -617.2805149614303
Iteration: 3, Func. Count: 24, Neg. LLF: -617.283473465347
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -617.2834734649837
Iterations: 7
Function evaluations: 24
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -617.47831372674
Iteration: 2, Func. Count: 17, Neg. LLF: -617.4816825575017
Iteration: 3, Func. Count: 27, Neg. LLF: -617.4816839396797
Iteration: 4, Func. Count: 34, Neg. LLF: -617.4857295797315
Iteration: 5, Func. Count: 41, Neg. LLF: -617.4921703568198
Iteration: 6, Func. Count: 50, Neg. LLF: -617.4921883365566
Iteration: 7, Func. Count: 56, Neg. LLF: -617.4922186566887
Iteration: 8, Func. Count: 62, Neg. LLF: -617.4922419372579
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.4922419372579
Iterations: 9
Function evaluations: 62
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -617.3506667429708
Iteration: 2, Func. Count: 17, Neg. LLF: -617.3552894110871
Iteration: 3, Func. Count: 25, Neg. LLF: -617.3568129618252
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -617.3568129538619
Iterations: 7
Function evaluations: 25
Gradient evaluations: 3
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -617.5442590471339
Iteration: 2, Func. Count: 17, Neg. LLF: -617.5502755551065
Iteration: 3, Func. Count: 24, Neg. LLF: -617.5625432320895
Iteration: 4, Func. Count: 32, Neg. LLF: -617.5633609013546
Iteration: 5, Func. Count: 40, Neg. LLF: -617.5634163805114
Iteration: 6, Func. Count: 48, Neg. LLF: -617.56342188844
Iteration: 7, Func. Count: 53, Neg. LLF: -617.5634229074385
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.5634229074346
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -616.9686539378316
Iteration: 2, Func. Count: 17, Neg. LLF: -616.9868539938482
Iteration: 3, Func. Count: 25, Neg. LLF: -616.9912242307805
Iteration: 4, Func. Count: 32, Neg. LLF: -616.9930961521986
Iteration: 5, Func. Count: 40, Neg. LLF: -616.9931178277376
Iteration: 6, Func. Count: 45, Neg. LLF: -616.9931395145154
Optimization terminated successfully. (Exit mode 0)
Current function value: -616.9931395145252
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -620.130804640338
Iteration: 2, Func. Count: 17, Neg. LLF: -620.1384743989581
Iteration: 3, Func. Count: 26, Neg. LLF: -620.1387388879054
Iteration: 4, Func. Count: 33, Neg. LLF: -620.1408171117705
Iteration: 5, Func. Count: 38, Neg. LLF: -620.1408446996589
Iteration: 6, Func. Count: 45, Neg. LLF: -620.1408458104011
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.1408458103797
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -620.1394213565013
Iteration: 2, Func. Count: 17, Neg. LLF: -620.1486465792299
Iteration: 3, Func. Count: 26, Neg. LLF: -620.1488487142616
Iteration: 4, Func. Count: 34, Neg. LLF: -620.1493500943011
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.1493500942593
Iterations: 4
Function evaluations: 45
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -620.3974268340625
Iteration: 2, Func. Count: 17, Neg. LLF: -620.4089457083303
Iteration: 3, Func. Count: 26, Neg. LLF: -620.4091027465306
Iteration: 4, Func. Count: 33, Neg. LLF: -620.4100472670128
Iteration: 5, Func. Count: 38, Neg. LLF: -620.4101917246894
Iteration: 6, Func. Count: 44, Neg. LLF: -620.4101936374021
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.4101936374554
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -621.5277682749232
Iteration: 2, Func. Count: 17, Neg. LLF: -621.5429490112436
Iteration: 3, Func. Count: 24, Neg. LLF: -621.5467638640596
Iteration: 4, Func. Count: 31, Neg. LLF: -621.5514219766262
Iteration: 5, Func. Count: 39, Neg. LLF: -621.5514924796344
Iteration: 6, Func. Count: 45, Neg. LLF: -621.5515029539331
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.5515029539129
Iterations: 6
Function evaluations: 56
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -621.5491711045157
Iteration: 2, Func. Count: 17, Neg. LLF: -621.5674117916285
Iteration: 3, Func. Count: 26, Neg. LLF: -621.5674333983377
Iteration: 4, Func. Count: 35, Neg. LLF: -621.5674899439596
Iteration: 5, Func. Count: 44, Neg. LLF: -621.5674980851242
Iteration: 6, Func. Count: 56, Neg. LLF: -621.5675543254771
Iteration: 7, Func. Count: 63, Neg. LLF: -621.5797588244363
Iteration: 8, Func. Count: 71, Neg. LLF: -621.5805511889986
Iteration: 9, Func. Count: 80, Neg. LLF: -621.580557040254
Iteration: 10, Func. Count: 86, Neg. LLF: -621.5805808737621
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.580581131969
Iterations: 12
Function evaluations: 87
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -621.7072213307675
Iteration: 2, Func. Count: 17, Neg. LLF: -621.7218101329968
Iteration: 3, Func. Count: 26, Neg. LLF: -621.7218267594974
Iteration: 4, Func. Count: 33, Neg. LLF: -621.7345898276103
Iteration: 5, Func. Count: 41, Neg. LLF: -621.734719968236
Iteration: 6, Func. Count: 49, Neg. LLF: -621.7365277424094
Iteration: 7, Func. Count: 57, Neg. LLF: -621.7367586786227
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.736758678674
Iterations: 8
Function evaluations: 68
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -622.9264826446397
Iteration: 2, Func. Count: 17, Neg. LLF: -622.9389376772051
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -622.9389376548299
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -622.7288256837406
Iteration: 2, Func. Count: 17, Neg. LLF: -622.740960055419
Iteration: 3, Func. Count: 24, Neg. LLF: -622.7605735890825
Iteration: 4, Func. Count: 32, Neg. LLF: -622.7614437734112
Iteration: 5, Func. Count: 40, Neg. LLF: -622.7637823299422
Iteration: 6, Func. Count: 48, Neg. LLF: -622.7638707011939
Iteration: 7, Func. Count: 53, Neg. LLF: -622.763892171665
Optimization terminated successfully. (Exit mode 0)
Current function value: -622.7638921716408
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -621.6638288095187
Iteration: 2, Func. Count: 17, Neg. LLF: -621.6767861199934
Iteration: 3, Func. Count: 24, Neg. LLF: -621.6910350191458
Iteration: 4, Func. Count: 31, Neg. LLF: -621.6928887955114
Iteration: 5, Func. Count: 39, Neg. LLF: -621.692910057207
Iteration: 6, Func. Count: 46, Neg. LLF: -621.6935428435552
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.6935429123029
Iterations: 6
Function evaluations: 49
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -621.1676411232609
Iteration: 2, Func. Count: 17, Neg. LLF: -621.1759442303502
Iteration: 3, Func. Count: 24, Neg. LLF: -621.1997934502301
Iteration: 4, Func. Count: 32, Neg. LLF: -621.1998328708644
Iteration: 5, Func. Count: 40, Neg. LLF: -621.1998810926754
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.1998810926157
Iterations: 5
Function evaluations: 40
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -621.1085894592517
Iteration: 2, Func. Count: 17, Neg. LLF: -621.117237587453
Iteration: 3, Func. Count: 26, Neg. LLF: -621.1172552679143
Iteration: 4, Func. Count: 33, Neg. LLF: -621.127710202262
Iteration: 5, Func. Count: 42, Neg. LLF: -621.1277370232635
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.1277373270077
Iterations: 5
Function evaluations: 43
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -619.6958516745573
Iteration: 2, Func. Count: 17, Neg. LLF: -619.7002313156488
Iteration: 3, Func. Count: 26, Neg. LLF: -619.7006276530767
Iteration: 4, Func. Count: 33, Neg. LLF: -619.7038008899284
Iteration: 5, Func. Count: 39, Neg. LLF: -619.7038061827615
Iteration: 6, Func. Count: 45, Neg. LLF: -619.7038373576861
Optimization terminated successfully. (Exit mode 0)
Current function value: -619.7038373577009
Iterations: 6
Function evaluations: 56
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -619.9690612874952
Iteration: 2, Func. Count: 17, Neg. LLF: -619.9738088422635
Iteration: 3, Func. Count: 26, Neg. LLF: -619.9740263293775
Iteration: 4, Func. Count: 34, Neg. LLF: -619.9745410675683
Optimization terminated successfully. (Exit mode 0)
Current function value: -619.9745410675266
Iterations: 4
Function evaluations: 45
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -620.0637142966007
Iteration: 2, Func. Count: 17, Neg. LLF: -620.0668557635818
Iteration: 3, Func. Count: 26, Neg. LLF: -620.0670334337256
Iteration: 4, Func. Count: 34, Neg. LLF: -620.0674800988265
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.0674800987906
Iterations: 4
Function evaluations: 45
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -618.4986275933088
Iteration: 2, Func. Count: 17, Neg. LLF: -618.5000530885852
Iteration: 3, Func. Count: 25, Neg. LLF: -618.5039584201081
Iteration: 4, Func. Count: 32, Neg. LLF: -618.5046411856906
Iteration: 5, Func. Count: 39, Neg. LLF: -618.5046769061215
Optimization terminated successfully. (Exit mode 0)
Current function value: -618.5046779003696
Iterations: 5
Function evaluations: 40
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -618.1051289363702
Iteration: 2, Func. Count: 17, Neg. LLF: -618.1088484926811
Iteration: 3, Func. Count: 26, Neg. LLF: -618.1095754423443
Iteration: 4, Func. Count: 34, Neg. LLF: -618.114723808118
Iteration: 5, Func. Count: 40, Neg. LLF: -618.117468525596
Iteration: 6, Func. Count: 48, Neg. LLF: -618.1174986632784
Iteration: 7, Func. Count: 53, Neg. LLF: -618.1216053872845
Iteration: 8, Func. Count: 58, Neg. LLF: -618.1222270869293
Iteration: 9, Func. Count: 63, Neg. LLF: -618.1222282387571
Optimization terminated successfully. (Exit mode 0)
Current function value: -618.1222282387541
Iterations: 10
Function evaluations: 63
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -618.0044907947738
Iteration: 2, Func. Count: 17, Neg. LLF: -618.0070359119607
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -618.0070359220022
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -618.2146548473685
Iteration: 2, Func. Count: 17, Neg. LLF: -618.2160885079136
Iteration: 3, Func. Count: 25, Neg. LLF: -618.2215888334752
Iteration: 4, Func. Count: 32, Neg. LLF: -618.2285363030699
Iteration: 5, Func. Count: 39, Neg. LLF: -618.2303213840396
Iteration: 6, Func. Count: 45, Neg. LLF: -618.230558089246
Iteration: 7, Func. Count: 51, Neg. LLF: -618.230567056384
Optimization terminated successfully. (Exit mode 0)
Current function value: -618.230567071943
Iterations: 7
Function evaluations: 55
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -617.1764975809215
Iteration: 2, Func. Count: 17, Neg. LLF: -617.1773708571092
Iteration: 3, Func. Count: 24, Neg. LLF: -617.2094610822031
Iteration: 4, Func. Count: 32, Neg. LLF: -617.2095772146771
Iteration: 5, Func. Count: 40, Neg. LLF: -617.2123521887878
Iteration: 6, Func. Count: 48, Neg. LLF: -617.212404499456
Iteration: 7, Func. Count: 53, Neg. LLF: -617.2124187757654
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.2124187757572
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -617.0138734983462
Iteration: 2, Func. Count: 17, Neg. LLF: -617.0156094503106
Iteration: 3, Func. Count: 26, Neg. LLF: -617.01593637277
Iteration: 4, Func. Count: 38, Neg. LLF: -617.0162550559311
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -617.0162550407014
Iterations: 8
Function evaluations: 38
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -616.9602622153836
Iteration: 2, Func. Count: 17, Neg. LLF: -616.9620553108432
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -616.9620552951792
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -616.9947813923013
Iteration: 2, Func. Count: 17, Neg. LLF: -616.9967760375789
Iteration: 3, Func. Count: 26, Neg. LLF: -616.9968006971228
Iteration: 4, Func. Count: 33, Neg. LLF: -617.0040290166116
Iteration: 5, Func. Count: 45, Neg. LLF: -617.0042078898248
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -617.0042078968083
Iterations: 9
Function evaluations: 45
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -616.9859110547064
Iteration: 2, Func. Count: 17, Neg. LLF: -616.9879252150491
Iteration: 3, Func. Count: 25, Neg. LLF: -616.9889173566908
Iteration: 4, Func. Count: 32, Neg. LLF: -617.0012771294505
Iteration: 5, Func. Count: 39, Neg. LLF: -617.002705564058
Iteration: 6, Func. Count: 47, Neg. LLF: -617.002707826423
Iteration: 7, Func. Count: 54, Neg. LLF: -617.0028728364355
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.0028728363839
Iterations: 7
Function evaluations: 54
Gradient evaluations: 7
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -616.9368951820823
Iteration: 2, Func. Count: 17, Neg. LLF: -616.9392142038644
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -616.9392141894116
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -616.9053426038581
Iteration: 2, Func. Count: 17, Neg. LLF: -616.9073805542687
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -616.9073805394572
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -615.8723157707168
Iteration: 2, Func. Count: 17, Neg. LLF: -615.874469421007
Iteration: 3, Func. Count: 24, Neg. LLF: -615.878735587926
Iteration: 4, Func. Count: 32, Neg. LLF: -615.8790889246226
Iteration: 5, Func. Count: 41, Neg. LLF: -615.8790935904859
Optimization terminated successfully. (Exit mode 0)
Current function value: -615.8790936618245
Iterations: 5
Function evaluations: 44
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -615.2979023888228
Iteration: 2, Func. Count: 17, Neg. LLF: -615.2992724076223
Iteration: 3, Func. Count: 24, Neg. LLF: -615.3060735938608
Iteration: 4, Func. Count: 32, Neg. LLF: -615.3061703520123
Optimization terminated successfully. (Exit mode 0)
Current function value: -615.3061707656034
Iterations: 4
Function evaluations: 38
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -615.5873174193271
Iteration: 2, Func. Count: 17, Neg. LLF: -615.5897554957278
Iteration: 3, Func. Count: 24, Neg. LLF: -615.5907913380636
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -615.5907913401011
Iterations: 7
Function evaluations: 24
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -615.2546177600195
Iteration: 2, Func. Count: 17, Neg. LLF: -615.2563881215879
Iteration: 3, Func. Count: 24, Neg. LLF: -615.2574873861333
Iteration: 4, Func. Count: 32, Neg. LLF: -615.2575084619889
Optimization terminated successfully. (Exit mode 0)
Current function value: -615.2575084620157
Iterations: 4
Function evaluations: 43
Gradient evaluations: 4
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -615.2360511168179
Iteration: 2, Func. Count: 17, Neg. LLF: -615.2379785402394
Iteration: 3, Func. Count: 24, Neg. LLF: -615.2390689253267
Iteration: 4, Func. Count: 32, Neg. LLF: -615.2393247560576
Iteration: 5, Func. Count: 41, Neg. LLF: -615.2393263470781
Optimization terminated successfully. (Exit mode 0)
Current function value: -615.2393263474033
Iterations: 5
Function evaluations: 52
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -615.5794588733806
Iteration: 2, Func. Count: 17, Neg. LLF: -615.5817816227118
Iteration: 3, Func. Count: 24, Neg. LLF: -615.5841159996342
Iteration: 4, Func. Count: 32, Neg. LLF: -615.5843842874888
Iteration: 5, Func. Count: 41, Neg. LLF: -615.5843897468073
Optimization terminated successfully. (Exit mode 0)
Current function value: -615.5843897469119
Iterations: 5
Function evaluations: 52
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -615.6495736533207
Iteration: 2, Func. Count: 17, Neg. LLF: -615.6507103125612
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -615.6507103024542
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -615.3497622296895
Iteration: 2, Func. Count: 17, Neg. LLF: -615.3504505487256
Iteration: 3, Func. Count: 25, Neg. LLF: -615.3506082283911
Iteration: 4, Func. Count: 32, Neg. LLF: -615.3536766511879
Iteration: 5, Func. Count: 40, Neg. LLF: -615.3537792261881
Iteration: 6, Func. Count: 48, Neg. LLF: -615.3537905854655
Optimization terminated successfully. (Exit mode 0)
Current function value: -615.3537905855014
Iterations: 6
Function evaluations: 48
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -615.2461501394835
Iteration: 2, Func. Count: 17, Neg. LLF: -615.2462467342482
Iteration: 3, Func. Count: 24, Neg. LLF: -615.2465720714879
Iteration: 4, Func. Count: 32, Neg. LLF: -615.2472957272748
Iteration: 5, Func. Count: 40, Neg. LLF: -615.247297747915
Optimization terminated successfully. (Exit mode 0)
Current function value: -615.2472977484081
Iterations: 5
Function evaluations: 51
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -615.0630485080125
Iteration: 2, Func. Count: 17, Neg. LLF: -615.0630954372266
Iteration: 3, Func. Count: 25, Neg. LLF: -615.063242246885
Iteration: 4, Func. Count: 33, Neg. LLF: -615.0632885240161
Optimization terminated successfully. (Exit mode 0)
Current function value: -615.0632885240633
Iterations: 4
Function evaluations: 33
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -615.1567258852554
Iteration: 2, Func. Count: 17, Neg. LLF: -615.1568915502571
Iteration: 3, Func. Count: 24, Neg. LLF: -615.1583038351089
Iteration: 4, Func. Count: 32, Neg. LLF: -615.1589068075392
Iteration: 5, Func. Count: 40, Neg. LLF: -615.1590492022021
Optimization terminated successfully. (Exit mode 0)
Current function value: -615.1590494251695
Iterations: 5
Function evaluations: 42
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -614.4542349112395
Iteration: 2, Func. Count: 17, Neg. LLF: -614.4545402578319
Iteration: 3, Func. Count: 26, Neg. LLF: -614.4545583097802
Iteration: 4, Func. Count: 34, Neg. LLF: -614.4545987939305
Iteration: 5, Func. Count: 40, Neg. LLF: -614.4546023683884
Optimization terminated successfully. (Exit mode 0)
Current function value: -614.454602368365
Iterations: 5
Function evaluations: 40
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -614.6065311440976
Iteration: 2, Func. Count: 17, Neg. LLF: -614.6065637528707
Iteration: 3, Func. Count: 26, Neg. LLF: -614.6065799302769
Iteration: 4, Func. Count: 33, Neg. LLF: -614.6081551466291
Iteration: 5, Func. Count: 40, Neg. LLF: -614.6081575271672
Optimization terminated successfully. (Exit mode 0)
Current function value: -614.6081584639685
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -614.7526484143377
Iteration: 2, Func. Count: 17, Neg. LLF: -614.7559997369272
Iteration: 3, Func. Count: 25, Neg. LLF: -614.7565828501417
Iteration: 4, Func. Count: 33, Neg. LLF: -614.7569373540593
Optimization terminated successfully. (Exit mode 0)
Current function value: -614.7569373540227
Iterations: 4
Function evaluations: 44
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -616.5942190159838
Iteration: 2, Func. Count: 17, Neg. LLF: -616.5969175067636
Iteration: 3, Func. Count: 27, Neg. LLF: -616.5969229214882
Iteration: 4, Func. Count: 34, Neg. LLF: -616.5997156727703
Iteration: 5, Func. Count: 39, Neg. LLF: -616.5997177197746
Optimization terminated successfully. (Exit mode 0)
Current function value: -616.599717738903
Iterations: 5
Function evaluations: 43
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -616.826064461221
Iteration: 2, Func. Count: 17, Neg. LLF: -616.8288008510691
Iteration: 3, Func. Count: 26, Neg. LLF: -616.8288452152652
Iteration: 4, Func. Count: 33, Neg. LLF: -616.832690924554
Optimization terminated successfully. (Exit mode 0)
Current function value: -616.8326909246871
Iterations: 4
Function evaluations: 33
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -616.8314842160619
Iteration: 2, Func. Count: 17, Neg. LLF: -616.8357712853242
Iteration: 3, Func. Count: 26, Neg. LLF: -616.835813756896
Iteration: 4, Func. Count: 33, Neg. LLF: -616.8408152246598
Optimization terminated successfully. (Exit mode 0)
Current function value: -616.8408152247378
Iterations: 4
Function evaluations: 33
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -616.4733781559148
Iteration: 2, Func. Count: 17, Neg. LLF: -616.4800180597084
Iteration: 3, Func. Count: 24, Neg. LLF: -616.4832737634337
Iteration: 4, Func. Count: 31, Neg. LLF: -616.4838905150007
Iteration: 5, Func. Count: 39, Neg. LLF: -616.4838932529826
Iteration: 6, Func. Count: 48, Neg. LLF: -616.4845831996329
Optimization terminated successfully. (Exit mode 0)
Current function value: -616.4845831998664
Iterations: 6
Function evaluations: 59
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -617.803178701015
Iteration: 2, Func. Count: 17, Neg. LLF: -617.8106203750237
Iteration: 3, Func. Count: 26, Neg. LLF: -617.8106278273173
Iteration: 4, Func. Count: 34, Neg. LLF: -617.8115018277897
Iteration: 5, Func. Count: 42, Neg. LLF: -617.8115513124312
Iteration: 6, Func. Count: 51, Neg. LLF: -617.811567469276
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.8115678729591
Iterations: 7
Function evaluations: 53
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -617.8949098883583
Iteration: 2, Func. Count: 17, Neg. LLF: -617.897972749752
Iteration: 3, Func. Count: 24, Neg. LLF: -617.9094848398842
Iteration: 4, Func. Count: 32, Neg. LLF: -617.9097305341032
Iteration: 5, Func. Count: 40, Neg. LLF: -617.9104872684794
Iteration: 6, Func. Count: 49, Neg. LLF: -617.9104894557613
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.9104901114412
Iterations: 6
Function evaluations: 51
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -618.7807434726463
Iteration: 2, Func. Count: 17, Neg. LLF: -618.7855945313612
Iteration: 3, Func. Count: 24, Neg. LLF: -618.7953844026891
Iteration: 4, Func. Count: 32, Neg. LLF: -618.7955179764251
Iteration: 5, Func. Count: 40, Neg. LLF: -618.7955577210142
Optimization terminated successfully. (Exit mode 0)
Current function value: -618.7955577210507
Iterations: 5
Function evaluations: 40
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -619.0762031130491
Iteration: 2, Func. Count: 17, Neg. LLF: -619.0884866374735
Iteration: 3, Func. Count: 26, Neg. LLF: -619.0886582802434
Iteration: 4, Func. Count: 33, Neg. LLF: -619.0895381480782
Optimization terminated successfully. (Exit mode 0)
Current function value: -619.0895381482576
Iterations: 4
Function evaluations: 44
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -620.3171326814675
Iteration: 2, Func. Count: 17, Neg. LLF: -620.320808238258
Iteration: 3, Func. Count: 24, Neg. LLF: -620.3451315542834
Iteration: 4, Func. Count: 31, Neg. LLF: -620.3541301350771
Iteration: 5, Func. Count: 39, Neg. LLF: -620.354227016343
Iteration: 6, Func. Count: 47, Neg. LLF: -620.354237616065
Iteration: 7, Func. Count: 52, Neg. LLF: -620.3542420170643
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.3542420170653
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -621.6414860338543
Iteration: 2, Func. Count: 17, Neg. LLF: -621.648159290863
Iteration: 3, Func. Count: 25, Neg. LLF: -621.6485708823675
Iteration: 4, Func. Count: 32, Neg. LLF: -621.649704750443
Iteration: 5, Func. Count: 40, Neg. LLF: -621.6502440822517
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.6502442452189
Iterations: 5
Function evaluations: 42
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -620.1107869938576
Iteration: 2, Func. Count: 17, Neg. LLF: -620.1108137443571
Iteration: 3, Func. Count: 25, Neg. LLF: -620.1160095101175
Iteration: 4, Func. Count: 31, Neg. LLF: -620.1225008715957
Iteration: 5, Func. Count: 39, Neg. LLF: -620.1229358316734
Iteration: 6, Func. Count: 45, Neg. LLF: -620.1237169634691
Iteration: 7, Func. Count: 50, Neg. LLF: -620.1241379215128
Iteration: 8, Func. Count: 55, Neg. LLF: -620.1242434083274
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.124243408356
Iterations: 8
Function evaluations: 55
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -620.7596095013529
Iteration: 2, Func. Count: 17, Neg. LLF: -620.7640391760717
Iteration: 3, Func. Count: 25, Neg. LLF: -620.7646040606307
Iteration: 4, Func. Count: 33, Neg. LLF: -620.7649881748767
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.7649882279765
Iterations: 4
Function evaluations: 40
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -622.8453259235705
Iteration: 2, Func. Count: 17, Neg. LLF: -622.8492293561676
Iteration: 3, Func. Count: 24, Neg. LLF: -622.8506269059466
Iteration: 4, Func. Count: 32, Neg. LLF: -622.8509106047277
Iteration: 5, Func. Count: 39, Neg. LLF: -622.851146211905
Optimization terminated successfully. (Exit mode 0)
Current function value: -622.8511462906803
Iterations: 5
Function evaluations: 43
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -622.8371787668482
Iteration: 2, Func. Count: 17, Neg. LLF: -622.8397814550717
Iteration: 3, Func. Count: 24, Neg. LLF: -622.841767386049
Iteration: 4, Func. Count: 32, Neg. LLF: -622.8420011604477
Iteration: 5, Func. Count: 39, Neg. LLF: -622.8421187586684
Optimization terminated successfully. (Exit mode 0)
Current function value: -622.8421187730253
Iterations: 5
Function evaluations: 43
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -623.2352630021285
Iteration: 2, Func. Count: 17, Neg. LLF: -623.237883395245
Iteration: 3, Func. Count: 24, Neg. LLF: -623.241654265718
Iteration: 4, Func. Count: 33, Neg. LLF: -623.2416715932511
Iteration: 5, Func. Count: 41, Neg. LLF: -623.2416815330666
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.2416815330353
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -623.0769436182431
Iteration: 2, Func. Count: 17, Neg. LLF: -623.0807824239992
Iteration: 3, Func. Count: 25, Neg. LLF: -623.0809792497625
Iteration: 4, Func. Count: 32, Neg. LLF: -623.0975022273665
Iteration: 5, Func. Count: 40, Neg. LLF: -623.0978058149054
Iteration: 6, Func. Count: 47, Neg. LLF: -623.097911934741
Iteration: 7, Func. Count: 52, Neg. LLF: -623.0979176242922
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.0979176243198
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -621.4544646831332
Iteration: 2, Func. Count: 17, Neg. LLF: -621.4588697417998
Iteration: 3, Func. Count: 24, Neg. LLF: -621.4664764926388
Iteration: 4, Func. Count: 32, Neg. LLF: -621.4668703503123
Iteration: 5, Func. Count: 39, Neg. LLF: -621.466916518772
Iteration: 6, Func. Count: 47, Neg. LLF: -621.4669436473705
Iteration: 7, Func. Count: 52, Neg. LLF: -621.4669452231819
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.4669452231842
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -621.9076270111514
Iteration: 2, Func. Count: 17, Neg. LLF: -621.9092302839784
Iteration: 3, Func. Count: 26, Neg. LLF: -621.9094942880802
Iteration: 4, Func. Count: 32, Neg. LLF: -621.913028957795
Iteration: 5, Func. Count: 37, Neg. LLF: -621.9132558156313
Iteration: 6, Func. Count: 43, Neg. LLF: -621.9132894843339
Iteration: 7, Func. Count: 48, Neg. LLF: -621.9133058425341
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.9133058425198
Iterations: 7
Function evaluations: 48
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -623.7768634758844
Iteration: 2, Func. Count: 17, Neg. LLF: -623.7795109559709
Iteration: 3, Func. Count: 25, Neg. LLF: -623.7811458549877
Iteration: 4, Func. Count: 31, Neg. LLF: -623.7968167466925
Iteration: 5, Func. Count: 39, Neg. LLF: -623.7968241023748
Iteration: 6, Func. Count: 44, Neg. LLF: -623.798223507052
Iteration: 7, Func. Count: 49, Neg. LLF: -623.7982729603605
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.7982729604125
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -623.3544845005663
Iteration: 2, Func. Count: 17, Neg. LLF: -623.3600294347638
Iteration: 3, Func. Count: 26, Neg. LLF: -623.3610933855799
Iteration: 4, Func. Count: 32, Neg. LLF: -623.3718651474635
Iteration: 5, Func. Count: 37, Neg. LLF: -623.372488305519
Iteration: 6, Func. Count: 44, Neg. LLF: -623.3725619441184
Iteration: 7, Func. Count: 49, Neg. LLF: -623.3726018782725
Iteration: 8, Func. Count: 54, Neg. LLF: -623.3726036700593
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.37260367012
Iterations: 8
Function evaluations: 54
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -624.2868662580008
Iteration: 2, Func. Count: 17, Neg. LLF: -624.2912818045456
Iteration: 3, Func. Count: 26, Neg. LLF: -624.2922738857371
Iteration: 4, Func. Count: 32, Neg. LLF: -624.3073639627595
Iteration: 5, Func. Count: 38, Neg. LLF: -624.3075510314561
Iteration: 6, Func. Count: 44, Neg. LLF: -624.3087304493476
Iteration: 7, Func. Count: 49, Neg. LLF: -624.3087385521487
Optimization terminated successfully. (Exit mode 0)
Current function value: -624.3087385521301
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -624.7842988405296
Iteration: 2, Func. Count: 17, Neg. LLF: -624.7889683054212
Iteration: 3, Func. Count: 26, Neg. LLF: -624.7895612195882
Iteration: 4, Func. Count: 32, Neg. LLF: -624.8004003524194
Iteration: 5, Func. Count: 38, Neg. LLF: -624.8005341014034
Iteration: 6, Func. Count: 44, Neg. LLF: -624.8009915378098
Iteration: 7, Func. Count: 49, Neg. LLF: -624.8009925856154
Optimization terminated successfully. (Exit mode 0)
Current function value: -624.8009925856104
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -624.3413888164829
Iteration: 2, Func. Count: 17, Neg. LLF: -624.3466231300654
Iteration: 3, Func. Count: 26, Neg. LLF: -624.3471274344681
Iteration: 4, Func. Count: 32, Neg. LLF: -624.3539271428754
Iteration: 5, Func. Count: 38, Neg. LLF: -624.3539302071147
Optimization terminated successfully. (Exit mode 0)
Current function value: -624.3539309601592
Iterations: 5
Function evaluations: 40
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -625.4844420248805
Iteration: 2, Func. Count: 17, Neg. LLF: -625.4900576840689
Iteration: 3, Func. Count: 26, Neg. LLF: -625.4901441576338
Iteration: 4, Func. Count: 32, Neg. LLF: -625.4920155190024
Iteration: 5, Func. Count: 37, Neg. LLF: -625.4929048287125
Iteration: 6, Func. Count: 43, Neg. LLF: -625.492923525317
Iteration: 7, Func. Count: 48, Neg. LLF: -625.4929273853547
Optimization terminated successfully. (Exit mode 0)
Current function value: -625.4929273853572
Iterations: 7
Function evaluations: 48
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -625.5421644323218
Iteration: 2, Func. Count: 17, Neg. LLF: -625.5481036523681
Iteration: 3, Func. Count: 26, Neg. LLF: -625.5481336276026
Iteration: 4, Func. Count: 33, Neg. LLF: -625.5495760866443
Optimization terminated successfully. (Exit mode 0)
Current function value: -625.5495761655459
Iterations: 4
Function evaluations: 38
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -625.7221151634286
Iteration: 2, Func. Count: 17, Neg. LLF: -625.7275446131839
Iteration: 3, Func. Count: 27, Neg. LLF: -625.7275467574624
Iteration: 4, Func. Count: 34, Neg. LLF: -625.727956075079
Optimization terminated successfully. (Exit mode 0)
Current function value: -625.7279560751497
Iterations: 4
Function evaluations: 34
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -626.903236219257
Iteration: 2, Func. Count: 17, Neg. LLF: -626.9101338940213
Iteration: 3, Func. Count: 24, Neg. LLF: -626.9111190957088
Iteration: 4, Func. Count: 32, Neg. LLF: -626.9113870273036
Iteration: 5, Func. Count: 39, Neg. LLF: -626.9117372938559
Optimization terminated successfully. (Exit mode 0)
Current function value: -626.9117374978073
Iterations: 5
Function evaluations: 43
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -626.5612681187366
Iteration: 2, Func. Count: 17, Neg. LLF: -626.5682668741158
Iteration: 3, Func. Count: 25, Neg. LLF: -626.5688058481016
Iteration: 4, Func. Count: 33, Neg. LLF: -626.5690953435886
Optimization terminated successfully. (Exit mode 0)
Current function value: -626.5690953440825
Iterations: 4
Function evaluations: 44
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -625.640870415287
Iteration: 2, Func. Count: 17, Neg. LLF: -625.6471065412338
Iteration: 3, Func. Count: 25, Neg. LLF: -625.6472806358912
Iteration: 4, Func. Count: 33, Neg. LLF: -625.6475534292163
Optimization terminated successfully. (Exit mode 0)
Current function value: -625.6475534290466
Iterations: 4
Function evaluations: 44
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -625.6271788271654
Iteration: 2, Func. Count: 17, Neg. LLF: -625.6334165795956
Iteration: 3, Func. Count: 25, Neg. LLF: -625.6334744189073
Iteration: 4, Func. Count: 33, Neg. LLF: -625.6335242611132
Optimization terminated successfully. (Exit mode 0)
Current function value: -625.6335242611788
Iterations: 4
Function evaluations: 33
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -624.4085032751537
Iteration: 2, Func. Count: 17, Neg. LLF: -624.414079060526
Iteration: 3, Func. Count: 26, Neg. LLF: -624.41412056601
Iteration: 4, Func. Count: 33, Neg. LLF: -624.414217153364
Optimization terminated successfully. (Exit mode 0)
Current function value: -624.4142171534647
Iterations: 4
Function evaluations: 44
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -623.9594524155764
Iteration: 2, Func. Count: 17, Neg. LLF: -623.9661617238737
Iteration: 3, Func. Count: 26, Neg. LLF: -623.9662619206708
Iteration: 4, Func. Count: 32, Neg. LLF: -623.9673155099002
Iteration: 5, Func. Count: 37, Neg. LLF: -623.9685029545424
Iteration: 6, Func. Count: 43, Neg. LLF: -623.9685107876467
Iteration: 7, Func. Count: 48, Neg. LLF: -623.9685130013679
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.9685130013697
Iterations: 7
Function evaluations: 48
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -624.0011902680118
Iteration: 2, Func. Count: 17, Neg. LLF: -624.0075192504773
Iteration: 3, Func. Count: 26, Neg. LLF: -624.0076630138179
Iteration: 4, Func. Count: 32, Neg. LLF: -624.0121162914286
Iteration: 5, Func. Count: 37, Neg. LLF: -624.0122144896403
Iteration: 6, Func. Count: 43, Neg. LLF: -624.0122808531758
Iteration: 7, Func. Count: 48, Neg. LLF: -624.0123221200266
Optimization terminated successfully. (Exit mode 0)
Current function value: -624.0123221199049
Iterations: 7
Function evaluations: 48
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -623.9763902837761
Iteration: 2, Func. Count: 17, Neg. LLF: -623.9827954996172
Iteration: 3, Func. Count: 26, Neg. LLF: -623.9829440279416
Iteration: 4, Func. Count: 32, Neg. LLF: -623.9875716041727
Iteration: 5, Func. Count: 37, Neg. LLF: -623.987626651125
Iteration: 6, Func. Count: 43, Neg. LLF: -623.9876978666085
Iteration: 7, Func. Count: 48, Neg. LLF: -623.9877490591615
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.9877490589755
Iterations: 7
Function evaluations: 48
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -623.8647570703222
Iteration: 2, Func. Count: 17, Neg. LLF: -623.8715162781209
Iteration: 3, Func. Count: 26, Neg. LLF: -623.8717201667426
Iteration: 4, Func. Count: 32, Neg. LLF: -623.8774214840276
Iteration: 5, Func. Count: 38, Neg. LLF: -623.8774418260589
Iteration: 6, Func. Count: 44, Neg. LLF: -623.877470672059
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.8774706720396
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -623.8146566518176
Iteration: 2, Func. Count: 17, Neg. LLF: -623.8220120269855
Iteration: 3, Func. Count: 26, Neg. LLF: -623.8222597733676
Iteration: 4, Func. Count: 32, Neg. LLF: -623.829486457244
Iteration: 5, Func. Count: 38, Neg. LLF: -623.8294911215712
Iteration: 6, Func. Count: 43, Neg. LLF: -623.829494621267
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.82949462119
Iterations: 6
Function evaluations: 43
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -623.7156937886571
Iteration: 2, Func. Count: 17, Neg. LLF: -623.7231093830912
Iteration: 3, Func. Count: 26, Neg. LLF: -623.723447118864
Iteration: 4, Func. Count: 32, Neg. LLF: -623.7321332696172
Iteration: 5, Func. Count: 38, Neg. LLF: -623.7321605460979
Iteration: 6, Func. Count: 44, Neg. LLF: -623.7322604527332
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.7322604527399
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -623.0776210427416
Iteration: 2, Func. Count: 17, Neg. LLF: -623.0863193652431
Iteration: 3, Func. Count: 26, Neg. LLF: -623.08690468335
Iteration: 4, Func. Count: 32, Neg. LLF: -623.0968823455869
Iteration: 5, Func. Count: 38, Neg. LLF: -623.0969547387913
Iteration: 6, Func. Count: 44, Neg. LLF: -623.0972662738079
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.0972662739973
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -623.2832659656315
Iteration: 2, Func. Count: 17, Neg. LLF: -623.2931265536258
Iteration: 3, Func. Count: 26, Neg. LLF: -623.2937375250049
Iteration: 4, Func. Count: 32, Neg. LLF: -623.3073586644978
Iteration: 5, Func. Count: 38, Neg. LLF: -623.3075908772357
Iteration: 6, Func. Count: 44, Neg. LLF: -623.3086691724595
Iteration: 7, Func. Count: 49, Neg. LLF: -623.3086802614052
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.3086802613839
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -621.8506717754212
Iteration: 2, Func. Count: 17, Neg. LLF: -621.8588969627034
Iteration: 3, Func. Count: 25, Neg. LLF: -621.8614940389543
Iteration: 4, Func. Count: 31, Neg. LLF: -621.8917979854683
Iteration: 5, Func. Count: 39, Neg. LLF: -621.892510651389
Iteration: 6, Func. Count: 45, Neg. LLF: -621.8939138380904
Iteration: 7, Func. Count: 50, Neg. LLF: -621.8960208332077
Iteration: 8, Func. Count: 55, Neg. LLF: -621.8960751471604
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.8960751472737
Iterations: 8
Function evaluations: 55
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -621.1622481746515
Iteration: 2, Func. Count: 17, Neg. LLF: -621.1694233190918
Iteration: 3, Func. Count: 25, Neg. LLF: -621.1715883678305
Iteration: 4, Func. Count: 31, Neg. LLF: -621.2004301073428
Iteration: 5, Func. Count: 39, Neg. LLF: -621.2004558582565
Iteration: 6, Func. Count: 44, Neg. LLF: -621.2025698722698
Iteration: 7, Func. Count: 49, Neg. LLF: -621.2027303963646
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.2027303964815
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -621.3332772030662
Iteration: 2, Func. Count: 17, Neg. LLF: -621.3420678481577
Iteration: 3, Func. Count: 24, Neg. LLF: -621.344452917855
Iteration: 4, Func. Count: 30, Neg. LLF: -621.3834933481962
Iteration: 5, Func. Count: 38, Neg. LLF: -621.3842943958477
Iteration: 6, Func. Count: 44, Neg. LLF: -621.3871556845581
Iteration: 7, Func. Count: 49, Neg. LLF: -621.3872309402807
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.3872309402113
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -621.3809638191171
Iteration: 2, Func. Count: 17, Neg. LLF: -621.3897484903625
Iteration: 3, Func. Count: 25, Neg. LLF: -621.3916500508253
Iteration: 4, Func. Count: 31, Neg. LLF: -621.4298500681722
Iteration: 5, Func. Count: 39, Neg. LLF: -621.4302219773715
Iteration: 6, Func. Count: 46, Neg. LLF: -621.4328898177615
Iteration: 7, Func. Count: 51, Neg. LLF: -621.4343652314969
Iteration: 8, Func. Count: 56, Neg. LLF: -621.4344609108131
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.4344609108799
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -621.2707155160795
Iteration: 2, Func. Count: 17, Neg. LLF: -621.2797646969923
Iteration: 3, Func. Count: 25, Neg. LLF: -621.2829636548519
Iteration: 4, Func. Count: 31, Neg. LLF: -621.3236981295722
Iteration: 5, Func. Count: 39, Neg. LLF: -621.3237450946556
Iteration: 6, Func. Count: 44, Neg. LLF: -621.3266632792045
Iteration: 7, Func. Count: 49, Neg. LLF: -621.3267680613926
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.3267680615061
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -621.0428107089929
Iteration: 2, Func. Count: 17, Neg. LLF: -621.0510187885615
Iteration: 3, Func. Count: 25, Neg. LLF: -621.0537274748115
Iteration: 4, Func. Count: 31, Neg. LLF: -621.1011992064423
Iteration: 5, Func. Count: 39, Neg. LLF: -621.1012231855651
Iteration: 6, Func. Count: 45, Neg. LLF: -621.1043641619581
Iteration: 7, Func. Count: 50, Neg. LLF: -621.1043848175209
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.1043853359654
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -621.0397699129832
Iteration: 2, Func. Count: 17, Neg. LLF: -621.0464514092298
Iteration: 3, Func. Count: 25, Neg. LLF: -621.0489840798806
Iteration: 4, Func. Count: 31, Neg. LLF: -621.0887870909721
Iteration: 5, Func. Count: 39, Neg. LLF: -621.088835781464
Iteration: 6, Func. Count: 44, Neg. LLF: -621.0916529304768
Iteration: 7, Func. Count: 49, Neg. LLF: -621.0917235875504
Optimization terminated successfully. (Exit mode 0)
Current function value: -621.091723587642
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -620.9049987735482
Iteration: 2, Func. Count: 17, Neg. LLF: -620.9129672104211
Iteration: 3, Func. Count: 25, Neg. LLF: -620.9166148817476
Iteration: 4, Func. Count: 31, Neg. LLF: -620.9576189170382
Iteration: 5, Func. Count: 39, Neg. LLF: -620.9576698696603
Iteration: 6, Func. Count: 44, Neg. LLF: -620.9606321127103
Iteration: 7, Func. Count: 49, Neg. LLF: -620.9606924594966
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.9606924595835
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -617.6436022537325
Iteration: 2, Func. Count: 17, Neg. LLF: -617.6460654827396
Iteration: 3, Func. Count: 26, Neg. LLF: -617.6480001563498
Iteration: 4, Func. Count: 35, Neg. LLF: -617.6494050378678
Iteration: 5, Func. Count: 41, Neg. LLF: -617.6704032586156
Iteration: 6, Func. Count: 48, Neg. LLF: -617.6705375455272
Iteration: 7, Func. Count: 54, Neg. LLF: -617.6722191999497
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.6722191999224
Iterations: 8
Function evaluations: 54
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -613.3869116667927
Iteration: 2, Func. Count: 17, Neg. LLF: -613.4108510347517
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -613.4108510574767
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -608.9757547481968
Iteration: 2, Func. Count: 17, Neg. LLF: -609.1853376775184
Iteration: 3, Func. Count: 24, Neg. LLF: -609.2019683029564
Iteration: 4, Func. Count: 31, Neg. LLF: -609.2117209039915
Iteration: 5, Func. Count: 37, Neg. LLF: -609.2289700848976
Iteration: 6, Func. Count: 44, Neg. LLF: -609.2399118083413
Iteration: 7, Func. Count: 52, Neg. LLF: -609.2399775061285
Iteration: 8, Func. Count: 58, Neg. LLF: -609.2406765296446
Iteration: 9, Func. Count: 63, Neg. LLF: -609.2406937543128
Optimization terminated successfully. (Exit mode 0)
Current function value: -609.2406937543619
Iterations: 9
Function evaluations: 63
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -607.7338831006607
Iteration: 2, Func. Count: 17, Neg. LLF: -608.0237323110849
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -608.0237323754008
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -600.2850369594495
Iteration: 2, Func. Count: 17, Neg. LLF: -600.7200422799884
Iteration: 3, Func. Count: 29, Neg. LLF: -600.7200768301298
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -600.720076822363
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -599.8503231879239
Iteration: 2, Func. Count: 17, Neg. LLF: -600.1729292067384
Iteration: 3, Func. Count: 29, Neg. LLF: -600.1729387563016
Iteration: 4, Func. Count: 36, Neg. LLF: -600.1809985704865
Iteration: 5, Func. Count: 43, Neg. LLF: -600.1814706301559
Iteration: 6, Func. Count: 51, Neg. LLF: -600.1814817803975
Iteration: 7, Func. Count: 58, Neg. LLF: -600.1817370300023
Iteration: 8, Func. Count: 63, Neg. LLF: -600.1817526374871
Optimization terminated successfully. (Exit mode 0)
Current function value: -600.181752637488
Iterations: 9
Function evaluations: 63
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -600.8835676209447
Iteration: 2, Func. Count: 17, Neg. LLF: -601.2006608139708
Iteration: 3, Func. Count: 23, Neg. LLF: -601.2201508591246
Iteration: 4, Func. Count: 31, Neg. LLF: -601.2202627111598
Iteration: 5, Func. Count: 38, Neg. LLF: -601.2265858382106
Optimization terminated successfully. (Exit mode 0)
Current function value: -601.22658589088
Iterations: 5
Function evaluations: 44
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -601.1420088893315
Iteration: 2, Func. Count: 17, Neg. LLF: -601.5671807466957
Iteration: 3, Func. Count: 26, Neg. LLF: -601.5672577963876
Iteration: 4, Func. Count: 38, Neg. LLF: -601.567517524772
Iteration: 5, Func. Count: 49, Neg. LLF: -601.5675199324385
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -601.5675199483255
Iterations: 9
Function evaluations: 49
Gradient evaluations: 5
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -596.9246713460133
Iteration: 2, Func. Count: 17, Neg. LLF: -597.5810089431386
Iteration: 3, Func. Count: 29, Neg. LLF: -597.5813152199021
Iteration: 4, Func. Count: 36, Neg. LLF: -597.6009314172068
Iteration: 5, Func. Count: 43, Neg. LLF: -597.6226301196928
Iteration: 6, Func. Count: 55, Neg. LLF: -597.6226499290048
Iteration: 7, Func. Count: 63, Neg. LLF: -597.6258731080824
Iteration: 8, Func. Count: 70, Neg. LLF: -597.6327345232313
Iteration: 9, Func. Count: 78, Neg. LLF: -597.6329796660989
Iteration: 10, Func. Count: 83, Neg. LLF: -597.6329943749165
Optimization terminated successfully. (Exit mode 0)
Current function value: -597.632994374928
Iterations: 12
Function evaluations: 83
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -596.211487695897
Iteration: 2, Func. Count: 17, Neg. LLF: -596.6836559859632
Iteration: 3, Func. Count: 29, Neg. LLF: -596.6837179212013
Iteration: 4, Func. Count: 40, Neg. LLF: -596.6837197690669
Iteration: 5, Func. Count: 52, Neg. LLF: -596.6837220265281
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -596.6837220104038
Iterations: 9
Function evaluations: 52
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -596.642395262194
Iteration: 2, Func. Count: 17, Neg. LLF: -597.0092984376026
Iteration: 3, Func. Count: 29, Neg. LLF: -597.0093094769811
Iteration: 4, Func. Count: 36, Neg. LLF: -597.0251820269602
Iteration: 5, Func. Count: 48, Neg. LLF: -597.0361415002694
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -597.0361414942874
Iterations: 9
Function evaluations: 48
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -595.1169745844362
Iteration: 2, Func. Count: 17, Neg. LLF: -595.6439883013375
Iteration: 3, Func. Count: 25, Neg. LLF: -595.6471177853823
Iteration: 4, Func. Count: 33, Neg. LLF: -595.6471776436853
Iteration: 5, Func. Count: 41, Neg. LLF: -595.6472811298322
Optimization terminated successfully. (Exit mode 0)
Current function value: -595.6472811298208
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -596.521633632562
Iteration: 2, Func. Count: 17, Neg. LLF: -597.0723464153369
Iteration: 3, Func. Count: 29, Neg. LLF: -597.07250967814
Optimization terminated successfully. (Exit mode 0)
Current function value: -597.0725098785405
Iterations: 5
Function evaluations: 36
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -595.4171190586542
Iteration: 2, Func. Count: 17, Neg. LLF: -596.0673009414295
Iteration: 3, Func. Count: 29, Neg. LLF: -596.0676104448012
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -596.067610445852
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -595.1060610858547
Iteration: 2, Func. Count: 17, Neg. LLF: -595.6963183840076
Iteration: 3, Func. Count: 29, Neg. LLF: -595.6965407948416
Iteration: 4, Func. Count: 39, Neg. LLF: -595.696545521326
Iteration: 5, Func. Count: 46, Neg. LLF: -595.7009283710702
Iteration: 6, Func. Count: 54, Neg. LLF: -595.701025185655
Iteration: 7, Func. Count: 62, Neg. LLF: -595.7011246161758
Optimization terminated successfully. (Exit mode 0)
Current function value: -595.7011248300387
Iterations: 9
Function evaluations: 67
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -594.882006607062
Iteration: 2, Func. Count: 17, Neg. LLF: -595.4809045386551
Iteration: 3, Func. Count: 29, Neg. LLF: -595.481129527328
Iteration: 4, Func. Count: 36, Neg. LLF: -595.4852604890534
Iteration: 5, Func. Count: 43, Neg. LLF: -595.4864861435556
Iteration: 6, Func. Count: 52, Neg. LLF: -595.4864945655572
Iteration: 7, Func. Count: 57, Neg. LLF: -595.4864965560023
Optimization terminated successfully. (Exit mode 0)
Current function value: -595.4864965559852
Iterations: 8
Function evaluations: 57
Gradient evaluations: 7
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -594.7022429462068
Iteration: 2, Func. Count: 17, Neg. LLF: -595.3011178458171
Iteration: 3, Func. Count: 24, Neg. LLF: -595.3038842911409
Iteration: 4, Func. Count: 30, Neg. LLF: -595.306334975242
Optimization terminated successfully. (Exit mode 0)
Current function value: -595.3063349710051
Iterations: 4
Function evaluations: 41
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -593.5341478683158
Iteration: 2, Func. Count: 17, Neg. LLF: -594.1452736792241
Iteration: 3, Func. Count: 26, Neg. LLF: -594.14535638997
Iteration: 4, Func. Count: 33, Neg. LLF: -594.1535724035257
Iteration: 5, Func. Count: 41, Neg. LLF: -594.153737757485
Iteration: 6, Func. Count: 46, Neg. LLF: -594.15375836424
Optimization terminated successfully. (Exit mode 0)
Current function value: -594.1537583642246
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -593.0098240453556
Iteration: 2, Func. Count: 17, Neg. LLF: -593.6147985873865
Iteration: 3, Func. Count: 25, Neg. LLF: -593.6159693451725
Iteration: 4, Func. Count: 32, Neg. LLF: -593.6261747542293
Iteration: 5, Func. Count: 41, Neg. LLF: -593.6262111402705
Iteration: 6, Func. Count: 47, Neg. LLF: -593.6263017477534
Iteration: 7, Func. Count: 52, Neg. LLF: -593.626320661802
Iteration: 8, Func. Count: 57, Neg. LLF: -593.6263252723329
Optimization terminated successfully. (Exit mode 0)
Current function value: -593.6263252723334
Iterations: 8
Function evaluations: 57
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -593.2341645590204
Iteration: 2, Func. Count: 17, Neg. LLF: -593.8693371545019
Iteration: 3, Func. Count: 24, Neg. LLF: -593.8720239667057
Iteration: 4, Func. Count: 31, Neg. LLF: -593.8795982390948
Iteration: 5, Func. Count: 38, Neg. LLF: -593.880137238471
Iteration: 6, Func. Count: 46, Neg. LLF: -593.8803074699306
Iteration: 7, Func. Count: 51, Neg. LLF: -593.8804383862345
Iteration: 8, Func. Count: 56, Neg. LLF: -593.8805899040347
Optimization terminated successfully. (Exit mode 0)
Current function value: -593.880590277118
Iterations: 8
Function evaluations: 57
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -591.7083623134515
Iteration: 2, Func. Count: 17, Neg. LLF: -592.2785005240105
Iteration: 3, Func. Count: 29, Neg. LLF: -592.2785548398747
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -592.27855483648
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -592.3011580984232
Iteration: 2, Func. Count: 17, Neg. LLF: -592.962373262048
Iteration: 3, Func. Count: 29, Neg. LLF: -592.9626125417697
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -592.9626125453394
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -593.0664840359823
Iteration: 2, Func. Count: 17, Neg. LLF: -593.6201237633254
Iteration: 3, Func. Count: 25, Neg. LLF: -593.6207937355276
Iteration: 4, Func. Count: 32, Neg. LLF: -593.6226949024889
Iteration: 5, Func. Count: 41, Neg. LLF: -593.622701456777
Iteration: 6, Func. Count: 47, Neg. LLF: -593.6228070051325
Iteration: 7, Func. Count: 52, Neg. LLF: -593.6228109407299
Optimization terminated successfully. (Exit mode 0)
Current function value: -593.6228109407489
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -593.4642448990677
Iteration: 2, Func. Count: 17, Neg. LLF: -593.9616820815447
Iteration: 3, Func. Count: 25, Neg. LLF: -593.963058392809
Iteration: 4, Func. Count: 32, Neg. LLF: -593.9730308042219
Iteration: 5, Func. Count: 41, Neg. LLF: -593.9730340657082
Iteration: 6, Func. Count: 46, Neg. LLF: -593.9730674546583
Iteration: 7, Func. Count: 51, Neg. LLF: -593.9730881575229
Optimization terminated successfully. (Exit mode 0)
Current function value: -593.9730881575344
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -594.0011257889662
Iteration: 2, Func. Count: 17, Neg. LLF: -594.6025039466167
Iteration: 3, Func. Count: 26, Neg. LLF: -594.6033432043162
Iteration: 4, Func. Count: 32, Neg. LLF: -594.6089323607882
Iteration: 5, Func. Count: 40, Neg. LLF: -594.608956724024
Iteration: 6, Func. Count: 45, Neg. LLF: -594.6155225820669
Iteration: 7, Func. Count: 50, Neg. LLF: -594.6183602311163
Iteration: 8, Func. Count: 55, Neg. LLF: -594.6183680713948
Optimization terminated successfully. (Exit mode 0)
Current function value: -594.6183680713813
Iterations: 8
Function evaluations: 55
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -595.9900008046053
Iteration: 2, Func. Count: 17, Neg. LLF: -596.529795517198
Iteration: 3, Func. Count: 26, Neg. LLF: -596.530299932974
Iteration: 4, Func. Count: 35, Neg. LLF: -596.5316343745568
Iteration: 5, Func. Count: 44, Neg. LLF: -596.532076391579
Iteration: 6, Func. Count: 56, Neg. LLF: -596.5322742554323
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -596.5322742564271
Iterations: 10
Function evaluations: 56
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -591.1888101371992
Iteration: 2, Func. Count: 17, Neg. LLF: -591.6834701639561
Iteration: 3, Func. Count: 25, Neg. LLF: -591.6842312087554
Iteration: 4, Func. Count: 32, Neg. LLF: -591.6902165312338
Iteration: 5, Func. Count: 41, Neg. LLF: -591.6902189815899
Iteration: 6, Func. Count: 46, Neg. LLF: -591.6902360174943
Optimization terminated successfully. (Exit mode 0)
Current function value: -591.6902360174694
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -590.9761902297207
Iteration: 2, Func. Count: 17, Neg. LLF: -591.4539717336189
Iteration: 3, Func. Count: 26, Neg. LLF: -591.4542417242474
Iteration: 4, Func. Count: 32, Neg. LLF: -591.482707340419
Iteration: 5, Func. Count: 39, Neg. LLF: -591.482792669369
Iteration: 6, Func. Count: 45, Neg. LLF: -591.4838412683387
Iteration: 7, Func. Count: 50, Neg. LLF: -591.483844616859
Optimization terminated successfully. (Exit mode 0)
Current function value: -591.4838446168645
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -588.5231045438568
Iteration: 2, Func. Count: 17, Neg. LLF: -588.9457357178387
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -588.9457357210466
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -588.4368813299178
Iteration: 2, Func. Count: 17, Neg. LLF: -588.866156720984
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -588.866156734409
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -587.5779067007694
Iteration: 2, Func. Count: 17, Neg. LLF: -587.9727046599942
Iteration: 3, Func. Count: 29, Neg. LLF: -587.9727079254837
Iteration: 4, Func. Count: 40, Neg. LLF: -587.9727114107508
Optimization terminated successfully. (Exit mode 0)
Current function value: -587.972712483255
Iterations: 7
Function evaluations: 48
Gradient evaluations: 4
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -587.3092906122961
Iteration: 2, Func. Count: 17, Neg. LLF: -587.6232539704636
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -587.623253974072
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -586.8671968624844
Iteration: 2, Func. Count: 17, Neg. LLF: -587.2007884756558
Iteration: 3, Func. Count: 28, Neg. LLF: -587.2007898596702
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -587.2007898751637
Iterations: 7
Function evaluations: 28
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -586.8393157577771
Iteration: 2, Func. Count: 17, Neg. LLF: -587.1782705117428
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -587.1782705190793
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -587.7228936517126
Iteration: 2, Func. Count: 17, Neg. LLF: -588.053828633703
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -588.0538286419686
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -587.8473445509843
Iteration: 2, Func. Count: 17, Neg. LLF: -588.1967785689985
Iteration: 3, Func. Count: 28, Neg. LLF: -588.1967800234911
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -588.1967800355931
Iterations: 7
Function evaluations: 28
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -588.1674302041172
Iteration: 2, Func. Count: 17, Neg. LLF: -588.5551806924914
Iteration: 3, Func. Count: 26, Neg. LLF: -588.5553784170202
Iteration: 4, Func. Count: 38, Neg. LLF: -588.5555908017936
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -588.5555908011876
Iterations: 8
Function evaluations: 38
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -588.9601311657268
Iteration: 2, Func. Count: 17, Neg. LLF: -589.2996029586317
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -589.2996029607781
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -589.9119512284713
Iteration: 2, Func. Count: 17, Neg. LLF: -590.2545661886375
Iteration: 3, Func. Count: 26, Neg. LLF: -590.2547910085559
Iteration: 4, Func. Count: 38, Neg. LLF: -590.2550339213151
Iteration: 5, Func. Count: 45, Neg. LLF: -590.2682353923957
Iteration: 6, Func. Count: 57, Neg. LLF: -590.2792045716524
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -590.2792045677388
Iterations: 10
Function evaluations: 57
Gradient evaluations: 6
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -588.9001171246696
Iteration: 2, Func. Count: 17, Neg. LLF: -589.2783682421589
Iteration: 3, Func. Count: 25, Neg. LLF: -589.2796088562951
Iteration: 4, Func. Count: 32, Neg. LLF: -589.2880790977949
Iteration: 5, Func. Count: 40, Neg. LLF: -589.3031141258839
Iteration: 6, Func. Count: 52, Neg. LLF: -589.3031583703969
Iteration: 7, Func. Count: 59, Neg. LLF: -589.308616926058
Iteration: 8, Func. Count: 71, Neg. LLF: -589.3180700569703
Iteration: 9, Func. Count: 78, Neg. LLF: -589.3312465465162
Iteration: 10, Func. Count: 86, Neg. LLF: -589.3315659644541
Iteration: 11, Func. Count: 94, Neg. LLF: -589.3321677956583
Iteration: 12, Func. Count: 101, Neg. LLF: -589.3321746825875
Iteration: 13, Func. Count: 106, Neg. LLF: -589.3322373512301
Iteration: 14, Func. Count: 111, Neg. LLF: -589.3322405316829
Optimization terminated successfully. (Exit mode 0)
Current function value: -589.332240531683
Iterations: 16
Function evaluations: 111
Gradient evaluations: 14
Iteration: 1, Func. Count: 5, Neg. LLF: -589.612363181079
Iteration: 2, Func. Count: 17, Neg. LLF: -590.0038181973399
Iteration: 3, Func. Count: 24, Neg. LLF: -590.0415083896073
Iteration: 4, Func. Count: 31, Neg. LLF: -590.049206895314
Iteration: 5, Func. Count: 39, Neg. LLF: -590.0493954689746
Iteration: 6, Func. Count: 45, Neg. LLF: -590.0494377489371
Iteration: 7, Func. Count: 51, Neg. LLF: -590.0494397426329
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.0494397426593
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -590.3213700419346
Iteration: 2, Func. Count: 17, Neg. LLF: -590.7803113947464
Iteration: 3, Func. Count: 26, Neg. LLF: -590.7804530729596
Iteration: 4, Func. Count: 38, Neg. LLF: -590.7806543458896
Iteration: 5, Func. Count: 46, Neg. LLF: -590.7824036909055
Iteration: 6, Func. Count: 58, Neg. LLF: -590.7840752065819
Iteration: 7, Func. Count: 66, Neg. LLF: -590.7864562246574
Iteration: 8, Func. Count: 78, Neg. LLF: -590.7886472936805
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -590.7886473039666
Iterations: 12
Function evaluations: 78
Gradient evaluations: 8
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -591.7820505320425
Iteration: 2, Func. Count: 17, Neg. LLF: -592.2301815028563
Iteration: 3, Func. Count: 29, Neg. LLF: -592.2301873034419
Iteration: 4, Func. Count: 36, Neg. LLF: -592.271093385011
Iteration: 5, Func. Count: 43, Neg. LLF: -592.2804964215004
Iteration: 6, Func. Count: 51, Neg. LLF: -592.2809357162844
Iteration: 7, Func. Count: 59, Neg. LLF: -592.2810888650112
Iteration: 8, Func. Count: 64, Neg. LLF: -592.2811516536135
Iteration: 9, Func. Count: 69, Neg. LLF: -592.2811561370912
Optimization terminated successfully. (Exit mode 0)
Current function value: -592.2811561370913
Iterations: 10
Function evaluations: 69
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -591.6602484826944
Iteration: 2, Func. Count: 17, Neg. LLF: -592.0867290474273
Iteration: 3, Func. Count: 24, Neg. LLF: -592.1220058011193
Iteration: 4, Func. Count: 30, Neg. LLF: -592.1306638669773
Iteration: 5, Func. Count: 39, Neg. LLF: -592.1306673872675
Iteration: 6, Func. Count: 46, Neg. LLF: -592.136931234117
Iteration: 7, Func. Count: 51, Neg. LLF: -592.1369393977295
Optimization terminated successfully. (Exit mode 0)
Current function value: -592.1369393976767
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -590.6105312282741
Iteration: 2, Func. Count: 17, Neg. LLF: -591.0417485669872
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -591.041748565751
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -589.9963882269102
Iteration: 2, Func. Count: 17, Neg. LLF: -590.3944139470027
Iteration: 3, Func. Count: 29, Neg. LLF: -590.3944183082538
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -590.3944183110491
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -590.0099352099983
Iteration: 2, Func. Count: 17, Neg. LLF: -590.4183838534545
Iteration: 3, Func. Count: 25, Neg. LLF: -590.4213446143583
Iteration: 4, Func. Count: 34, Neg. LLF: -590.424885332023
Iteration: 5, Func. Count: 41, Neg. LLF: -590.4378670113508
Iteration: 6, Func. Count: 53, Neg. LLF: -590.4453187653379
Iteration: 7, Func. Count: 60, Neg. LLF: -590.464667514613
Iteration: 8, Func. Count: 67, Neg. LLF: -590.4703295534318
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.4703299574744
Iterations: 9
Function evaluations: 72
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -590.7888116886404
Iteration: 2, Func. Count: 17, Neg. LLF: -591.1808059383809
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -591.1808059417504
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -590.0127697357541
Iteration: 2, Func. Count: 17, Neg. LLF: -590.3997020011927
Iteration: 3, Func. Count: 29, Neg. LLF: -590.3997069582429
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -590.3997069595434
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -590.4475677092692
Iteration: 2, Func. Count: 17, Neg. LLF: -590.8522845369946
Iteration: 3, Func. Count: 26, Neg. LLF: -590.8525050737106
Iteration: 4, Func. Count: 38, Neg. LLF: -590.852774738853
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -590.8527747442669
Iterations: 8
Function evaluations: 38
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -590.3717172639352
Iteration: 2, Func. Count: 17, Neg. LLF: -590.7628380233473
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -590.7628380293368
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -589.4543340555358
Iteration: 2, Func. Count: 17, Neg. LLF: -589.847543209899
Iteration: 3, Func. Count: 29, Neg. LLF: -589.8475481016301
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -589.8475481083364
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -589.1957364563468
Iteration: 2, Func. Count: 17, Neg. LLF: -589.602527728839
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -589.6025277438409
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -589.131504293096
Iteration: 2, Func. Count: 17, Neg. LLF: -589.5338510740089
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -589.5338510885931
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -589.3359327729834
Iteration: 2, Func. Count: 17, Neg. LLF: -589.722256479299
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -589.7222564940339
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -589.3326356297719
Iteration: 2, Func. Count: 17, Neg. LLF: -589.7119091784018
Iteration: 3, Func. Count: 26, Neg. LLF: -589.7123358151532
Iteration: 4, Func. Count: 38, Neg. LLF: -589.7128272760318
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -589.7128272908127
Iterations: 8
Function evaluations: 38
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -588.6324460111744
Iteration: 2, Func. Count: 17, Neg. LLF: -588.9999871254734
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -588.999987143237
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -588.4638699392767
Iteration: 2, Func. Count: 17, Neg. LLF: -588.81636422697
Iteration: 3, Func. Count: 24, Neg. LLF: -588.8299133112146
Iteration: 4, Func. Count: 31, Neg. LLF: -588.8374720090419
Iteration: 5, Func. Count: 38, Neg. LLF: -588.839365885693
Iteration: 6, Func. Count: 47, Neg. LLF: -588.83936777194
Iteration: 7, Func. Count: 53, Neg. LLF: -588.8396555056394
Optimization terminated successfully. (Exit mode 0)
Current function value: -588.8396555056477
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -588.340305730184
Iteration: 2, Func. Count: 17, Neg. LLF: -588.7007662226101
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -588.7007662475911
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -587.0027840370235
Iteration: 2, Func. Count: 17, Neg. LLF: -587.3643072967686
Iteration: 3, Func. Count: 29, Neg. LLF: -587.3643095300401
Iteration: 4, Func. Count: 39, Neg. LLF: -587.3643461308085
Iteration: 5, Func. Count: 51, Neg. LLF: -587.3643814365208
Iteration: 6, Func. Count: 58, Neg. LLF: -587.3706220273369
Iteration: 7, Func. Count: 69, Neg. LLF: -587.3706235122368
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -587.3706235245863
Iterations: 11
Function evaluations: 69
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -586.391479232947
Iteration: 2, Func. Count: 17, Neg. LLF: -586.7836654030609
Iteration: 3, Func. Count: 24, Neg. LLF: -586.7984258902293
Iteration: 4, Func. Count: 31, Neg. LLF: -586.8007465358382
Iteration: 5, Func. Count: 39, Neg. LLF: -586.8007585942836
Iteration: 6, Func. Count: 45, Neg. LLF: -586.800764781618
Iteration: 7, Func. Count: 50, Neg. LLF: -586.8007768774004
Optimization terminated successfully. (Exit mode 0)
Current function value: -586.8007768773558
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -585.350008347683
Iteration: 2, Func. Count: 17, Neg. LLF: -585.7311712291505
Iteration: 3, Func. Count: 29, Neg. LLF: -585.731172791968
Optimization terminated successfully. (Exit mode 0)
Current function value: -585.7311728100941
Iterations: 5
Function evaluations: 40
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -585.1582603819562
Iteration: 2, Func. Count: 17, Neg. LLF: -585.5670360293336
Iteration: 3, Func. Count: 29, Neg. LLF: -585.5670409712322
Iteration: 4, Func. Count: 36, Neg. LLF: -585.5788228304077
Iteration: 5, Func. Count: 43, Neg. LLF: -585.5858963278827
Iteration: 6, Func. Count: 50, Neg. LLF: -585.5881412312071
Iteration: 7, Func. Count: 58, Neg. LLF: -585.5881585468378
Iteration: 8, Func. Count: 64, Neg. LLF: -585.5883417978641
Optimization terminated successfully. (Exit mode 0)
Current function value: -585.5883417979128
Iterations: 9
Function evaluations: 64
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -583.8098084012956
Iteration: 2, Func. Count: 17, Neg. LLF: -584.2652598080085
Iteration: 3, Func. Count: 24, Neg. LLF: -584.2738872929441
Iteration: 4, Func. Count: 31, Neg. LLF: -584.2752957268324
Iteration: 5, Func. Count: 39, Neg. LLF: -584.275458856602
Iteration: 6, Func. Count: 45, Neg. LLF: -584.2755107788014
Iteration: 7, Func. Count: 51, Neg. LLF: -584.2755760986806
Optimization terminated successfully. (Exit mode 0)
Current function value: -584.2755760986868
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -584.9507002596004
Iteration: 2, Func. Count: 17, Neg. LLF: -585.44959730723
Iteration: 3, Func. Count: 24, Neg. LLF: -585.4571433192997
Iteration: 4, Func. Count: 31, Neg. LLF: -585.4636691626547
Iteration: 5, Func. Count: 38, Neg. LLF: -585.4677858072552
Iteration: 6, Func. Count: 45, Neg. LLF: -585.4679330603055
Iteration: 7, Func. Count: 53, Neg. LLF: -585.4684806948308
Iteration: 8, Func. Count: 58, Neg. LLF: -585.468953593828
Iteration: 9, Func. Count: 63, Neg. LLF: -585.4689605453202
Optimization terminated successfully. (Exit mode 0)
Current function value: -585.4689605453518
Iterations: 9
Function evaluations: 63
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -585.3045370830409
Iteration: 2, Func. Count: 17, Neg. LLF: -585.8657163683505
Iteration: 3, Func. Count: 29, Neg. LLF: -585.8657292532084
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -585.8657292634493
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -587.3293723093961
Iteration: 2, Func. Count: 17, Neg. LLF: -587.9023159283649
Iteration: 3, Func. Count: 29, Neg. LLF: -587.9023246789341
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -587.9023246861852
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -587.6079333156513
Iteration: 2, Func. Count: 17, Neg. LLF: -588.1432768220337
Iteration: 3, Func. Count: 29, Neg. LLF: -588.1432845818082
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -588.1432845828979
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -588.630554263241
Iteration: 2, Func. Count: 17, Neg. LLF: -589.1616138754847
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 3, Func. Count: 29, Neg. LLF: -589.1616175986962
Iteration: 4, Func. Count: 36, Neg. LLF: -589.2036918329529
Iteration: 5, Func. Count: 48, Neg. LLF: -589.2189501597406
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -589.2189501490691
Iterations: 9
Function evaluations: 48
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -589.0744070971882
Iteration: 2, Func. Count: 17, Neg. LLF: -589.5790785258804
Iteration: 3, Func. Count: 29, Neg. LLF: -589.579081881853
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -589.5790818772899
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -590.1306255875475
Iteration: 2, Func. Count: 17, Neg. LLF: -590.6224474033987
Iteration: 3, Func. Count: 26, Neg. LLF: -590.6225680443655
Iteration: 4, Func. Count: 34, Neg. LLF: -590.6267592549556
Iteration: 5, Func. Count: 43, Neg. LLF: -590.6337365110498
Iteration: 6, Func. Count: 51, Neg. LLF: -590.6372792683171
Iteration: 7, Func. Count: 63, Neg. LLF: -590.6400287266513
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -590.6400287175691
Iterations: 11
Function evaluations: 63
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -589.8271243193981
Iteration: 2, Func. Count: 17, Neg. LLF: -590.3826153567777
Iteration: 3, Func. Count: 24, Neg. LLF: -590.3836362264565
Iteration: 4, Func. Count: 30, Neg. LLF: -590.3897773523852
Iteration: 5, Func. Count: 38, Neg. LLF: -590.3905795955858
Iteration: 6, Func. Count: 44, Neg. LLF: -590.394238685103
Iteration: 7, Func. Count: 49, Neg. LLF: -590.3947359836382
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.3947359836218
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -589.9916881035546
Iteration: 2, Func. Count: 17, Neg. LLF: -590.5446617593333
Iteration: 3, Func. Count: 25, Neg. LLF: -590.5456731690667
Iteration: 4, Func. Count: 32, Neg. LLF: -590.5542447815947
Iteration: 5, Func. Count: 40, Neg. LLF: -590.5543757974633
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.5543758081026
Iterations: 5
Function evaluations: 47
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -588.9296976759908
Iteration: 2, Func. Count: 17, Neg. LLF: -589.5282719941673
Iteration: 3, Func. Count: 25, Neg. LLF: -589.5288635051884
Iteration: 4, Func. Count: 32, Neg. LLF: -589.5310792888574
Iteration: 5, Func. Count: 41, Neg. LLF: -589.5310970385186
Iteration: 6, Func. Count: 46, Neg. LLF: -589.531139356015
Optimization terminated successfully. (Exit mode 0)
Current function value: -589.5311393559253
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -587.8993225420672
Iteration: 2, Func. Count: 17, Neg. LLF: -588.425956818754
Iteration: 3, Func. Count: 29, Neg. LLF: -588.4259984591029
Iteration: 4, Func. Count: 36, Neg. LLF: -588.4322752335108
Iteration: 5, Func. Count: 44, Neg. LLF: -588.43261725851
Iteration: 6, Func. Count: 51, Neg. LLF: -588.4343499724786
Iteration: 7, Func. Count: 60, Neg. LLF: -588.4343599525774
Iteration: 8, Func. Count: 65, Neg. LLF: -588.4344052555188
Optimization terminated successfully. (Exit mode 0)
Current function value: -588.4344052555266
Iterations: 9
Function evaluations: 65
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -587.2805101111779
Iteration: 2, Func. Count: 17, Neg. LLF: -587.8740944058368
Iteration: 3, Func. Count: 29, Neg. LLF: -587.8741529060811
Iteration: 4, Func. Count: 38, Neg. LLF: -587.8742783756147
Iteration: 5, Func. Count: 45, Neg. LLF: -587.8771899528556
Iteration: 6, Func. Count: 53, Neg. LLF: -587.8775895901207
Iteration: 7, Func. Count: 61, Neg. LLF: -587.8778390633627
Iteration: 8, Func. Count: 69, Neg. LLF: -587.8781989823486
Iteration: 9, Func. Count: 74, Neg. LLF: -587.8782085907283
Iteration: 10, Func. Count: 79, Neg. LLF: -587.8782097088867
Optimization terminated successfully. (Exit mode 0)
Current function value: -587.8782097088549
Iterations: 12
Function evaluations: 79
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -587.2545301956243
Iteration: 2, Func. Count: 17, Neg. LLF: -587.8097631044309
Iteration: 3, Func. Count: 29, Neg. LLF: -587.8098018158787
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -587.8098018201036
Iterations: 7
Function evaluations: 29
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -588.5755584450899
Iteration: 2, Func. Count: 17, Neg. LLF: -589.1411512219656
Iteration: 3, Func. Count: 29, Neg. LLF: -589.1412054087536
Iteration: 4, Func. Count: 39, Neg. LLF: -589.1412245578388
Iteration: 5, Func. Count: 51, Neg. LLF: -589.1412371271647
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -589.1412371285146
Iterations: 9
Function evaluations: 51
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -588.4959113683087
Iteration: 2, Func. Count: 17, Neg. LLF: -589.042709603827
Iteration: 3, Func. Count: 24, Neg. LLF: -589.0515684220734
Iteration: 4, Func. Count: 31, Neg. LLF: -589.0529939800988
Iteration: 5, Func. Count: 39, Neg. LLF: -589.0531726586271
Iteration: 6, Func. Count: 45, Neg. LLF: -589.0531877568985
Iteration: 7, Func. Count: 51, Neg. LLF: -589.0531955651577
Optimization terminated successfully. (Exit mode 0)
Current function value: -589.0531955651526
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -585.6500175299448
Iteration: 2, Func. Count: 17, Neg. LLF: -586.1149953673755
Iteration: 3, Func. Count: 26, Neg. LLF: -586.1162925563441
Iteration: 4, Func. Count: 33, Neg. LLF: -586.1202005214911
Iteration: 5, Func. Count: 40, Neg. LLF: -586.1202158280801
Iteration: 6, Func. Count: 45, Neg. LLF: -586.1202258217593
Optimization terminated successfully. (Exit mode 0)
Current function value: -586.1202258217356
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -586.053852578268
Iteration: 2, Func. Count: 17, Neg. LLF: -586.457419512763
Iteration: 3, Func. Count: 25, Neg. LLF: -586.4596942833998
Iteration: 4, Func. Count: 31, Neg. LLF: -586.4734107816391
Iteration: 5, Func. Count: 39, Neg. LLF: -586.473684011756
Iteration: 6, Func. Count: 44, Neg. LLF: -586.4777469658995
Iteration: 7, Func. Count: 49, Neg. LLF: -586.4783433623363
Iteration: 8, Func. Count: 54, Neg. LLF: -586.4783443810086
Optimization terminated successfully. (Exit mode 0)
Current function value: -586.4783443810062
Iterations: 8
Function evaluations: 54
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -586.0596833532718
Iteration: 2, Func. Count: 17, Neg. LLF: -586.4657828566831
Iteration: 3, Func. Count: 25, Neg. LLF: -586.4665534895187
Iteration: 4, Func. Count: 32, Neg. LLF: -586.4790303331275
Iteration: 5, Func. Count: 40, Neg. LLF: -586.4797976547563
Iteration: 6, Func. Count: 45, Neg. LLF: -586.4797987346531
Optimization terminated successfully. (Exit mode 0)
Current function value: -586.4797987346395
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -585.0746024968043
Iteration: 2, Func. Count: 17, Neg. LLF: -585.4337067840618
Iteration: 3, Func. Count: 26, Neg. LLF: -585.4350452522672
Iteration: 4, Func. Count: 33, Neg. LLF: -585.4420193849437
Iteration: 5, Func. Count: 39, Neg. LLF: -585.44202132077
Iteration: 6, Func. Count: 45, Neg. LLF: -585.4420264734244
Optimization terminated successfully. (Exit mode 0)
Current function value: -585.4420264734199
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -583.8558084894851
Iteration: 2, Func. Count: 17, Neg. LLF: -584.2364254822949
Iteration: 3, Func. Count: 26, Neg. LLF: -584.2369597989548
Iteration: 4, Func. Count: 33, Neg. LLF: -584.2388760425038
Iteration: 5, Func. Count: 38, Neg. LLF: -584.2388805017886
Optimization terminated successfully. (Exit mode 0)
Current function value: -584.2388807835207
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -582.5926801478656
Iteration: 2, Func. Count: 17, Neg. LLF: -583.0469833590862
Iteration: 3, Func. Count: 25, Neg. LLF: -583.0475754555789
Iteration: 4, Func. Count: 32, Neg. LLF: -583.0480613760699
Iteration: 5, Func. Count: 41, Neg. LLF: -583.0480696755851
Iteration: 6, Func. Count: 46, Neg. LLF: -583.0482513261516
Optimization terminated successfully. (Exit mode 0)
Current function value: -583.0482513261624
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -582.0651528332689
Iteration: 2, Func. Count: 17, Neg. LLF: -582.4714344394149
Iteration: 3, Func. Count: 25, Neg. LLF: -582.4717283902878
Iteration: 4, Func. Count: 32, Neg. LLF: -582.4723398149818
Iteration: 5, Func. Count: 41, Neg. LLF: -582.4723420318387
Iteration: 6, Func. Count: 46, Neg. LLF: -582.4723504940173
Optimization terminated successfully. (Exit mode 0)
Current function value: -582.4723504940157
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -581.6169375563229
Iteration: 2, Func. Count: 17, Neg. LLF: -582.0045598728725
Iteration: 3, Func. Count: 27, Neg. LLF: -582.004563033214
Iteration: 4, Func. Count: 34, Neg. LLF: -582.0063153461448
Iteration: 5, Func. Count: 41, Neg. LLF: -582.0063189927725
Iteration: 6, Func. Count: 46, Neg. LLF: -582.0063252554223
Optimization terminated successfully. (Exit mode 0)
Current function value: -582.0063252554035
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -580.8876927804279
Iteration: 2, Func. Count: 17, Neg. LLF: -581.2370563715453
Iteration: 3, Func. Count: 26, Neg. LLF: -581.2371728937043
Iteration: 4, Func. Count: 33, Neg. LLF: -581.2386037204521
Iteration: 5, Func. Count: 39, Neg. LLF: -581.2386049781807
Iteration: 6, Func. Count: 44, Neg. LLF: -581.2386097713272
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.2386097712757
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -580.7897288265098
Iteration: 2, Func. Count: 17, Neg. LLF: -581.1572207387428
Iteration: 3, Func. Count: 26, Neg. LLF: -581.1577475618406
Iteration: 4, Func. Count: 33, Neg. LLF: -581.1642670573152
Iteration: 5, Func. Count: 38, Neg. LLF: -581.1642738075508
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.1642742432923
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -580.9382417598692
Iteration: 2, Func. Count: 17, Neg. LLF: -581.3089754580614
Iteration: 3, Func. Count: 26, Neg. LLF: -581.3092179763382
Iteration: 4, Func. Count: 33, Neg. LLF: -581.3141309711087
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.3141314400383
Iterations: 4
Function evaluations: 35
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -580.2298683645947
Iteration: 2, Func. Count: 17, Neg. LLF: -580.6024232760968
Iteration: 3, Func. Count: 26, Neg. LLF: -580.6041888278756
Iteration: 4, Func. Count: 33, Neg. LLF: -580.611768939708
Iteration: 5, Func. Count: 40, Neg. LLF: -580.6118919882626
Iteration: 6, Func. Count: 46, Neg. LLF: -580.6119932872767
Optimization terminated successfully. (Exit mode 0)
Current function value: -580.6119940377237
Iterations: 6
Function evaluations: 47
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -579.845179289885
Iteration: 2, Func. Count: 17, Neg. LLF: -580.2303176916134
Iteration: 3, Func. Count: 25, Neg. LLF: -580.2332626384748
Iteration: 4, Func. Count: 32, Neg. LLF: -580.2421183722888
Iteration: 5, Func. Count: 39, Neg. LLF: -580.244478705624
Iteration: 6, Func. Count: 46, Neg. LLF: -580.2464338684131
Iteration: 7, Func. Count: 51, Neg. LLF: -580.2467687920777
Iteration: 8, Func. Count: 56, Neg. LLF: -580.2467748378335
Optimization terminated successfully. (Exit mode 0)
Current function value: -580.2467750647565
Iterations: 8
Function evaluations: 57
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -579.2565670152967
Iteration: 2, Func. Count: 17, Neg. LLF: -579.6668113562741
Iteration: 3, Func. Count: 25, Neg. LLF: -579.6687969529385
Iteration: 4, Func. Count: 32, Neg. LLF: -579.6754265658741
Iteration: 5, Func. Count: 40, Neg. LLF: -579.6759499869097
Iteration: 6, Func. Count: 45, Neg. LLF: -579.6759542819215
Optimization terminated successfully. (Exit mode 0)
Current function value: -579.6759542819037
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -578.9988919606276
Iteration: 2, Func. Count: 17, Neg. LLF: -579.4769418328904
Iteration: 3, Func. Count: 26, Neg. LLF: -579.4782709769524
Iteration: 4, Func. Count: 33, Neg. LLF: -579.4795156817547
Iteration: 5, Func. Count: 39, Neg. LLF: -579.4795183536851
Iteration: 6, Func. Count: 44, Neg. LLF: -579.4795214501951
Optimization terminated successfully. (Exit mode 0)
Current function value: -579.4795214501623
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -579.2807533369805
Iteration: 2, Func. Count: 17, Neg. LLF: -579.713600736326
Iteration: 3, Func. Count: 26, Neg. LLF: -579.7154008645659
Iteration: 4, Func. Count: 33, Neg. LLF: -579.7216644450032
Iteration: 5, Func. Count: 39, Neg. LLF: -579.7216700789004
Iteration: 6, Func. Count: 44, Neg. LLF: -579.7216775397455
Iteration: 7, Func. Count: 49, Neg. LLF: -579.7216789729914
Optimization terminated successfully. (Exit mode 0)
Current function value: -579.7216789729914
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -578.1377658194212
Iteration: 2, Func. Count: 17, Neg. LLF: -578.4858909097741
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -578.4858908961351
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -575.5751055271651
Iteration: 2, Func. Count: 17, Neg. LLF: -576.0940791285734
Iteration: 3, Func. Count: 25, Neg. LLF: -576.0995494449007
Iteration: 4, Func. Count: 32, Neg. LLF: -576.0998612943124
Iteration: 5, Func. Count: 40, Neg. LLF: -576.1000714074945
Iteration: 6, Func. Count: 45, Neg. LLF: -576.1002702415851
Iteration: 7, Func. Count: 51, Neg. LLF: -576.100287279434
Optimization terminated successfully. (Exit mode 0)
Current function value: -576.1002878385767
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -574.6666197924023
Iteration: 2, Func. Count: 17, Neg. LLF: -575.1408232308329
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -575.1408232052336
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -574.5288215245646
Iteration: 2, Func. Count: 17, Neg. LLF: -574.9817866659321
Iteration: 3, Func. Count: 25, Neg. LLF: -574.9869528441077
Iteration: 4, Func. Count: 33, Neg. LLF: -574.9946257227288
Iteration: 5, Func. Count: 45, Neg. LLF: -574.9948698938235
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -574.9948698754333
Iterations: 9
Function evaluations: 45
Gradient evaluations: 5
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -574.2668071057344
Iteration: 2, Func. Count: 17, Neg. LLF: -574.7342033481063
Iteration: 3, Func. Count: 24, Neg. LLF: -574.7467602694028
Iteration: 4, Func. Count: 32, Neg. LLF: -574.7610996670691
Iteration: 5, Func. Count: 44, Neg. LLF: -574.7612023038545
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -574.7612022869578
Iterations: 9
Function evaluations: 44
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -570.9935947525379
Iteration: 2, Func. Count: 17, Neg. LLF: -571.3684274306343
Iteration: 3, Func. Count: 24, Neg. LLF: -571.4117172470158
Iteration: 4, Func. Count: 31, Neg. LLF: -571.417261384934
Iteration: 5, Func. Count: 39, Neg. LLF: -571.4174671042208
Iteration: 6, Func. Count: 47, Neg. LLF: -571.4176067177655
Iteration: 7, Func. Count: 52, Neg. LLF: -571.4176222810144
Optimization terminated successfully. (Exit mode 0)
Current function value: -571.4176222810186
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -570.1375402784951
Iteration: 2, Func. Count: 17, Neg. LLF: -570.4422070052436
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -570.4422069804615
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -568.9673385551166
Iteration: 2, Func. Count: 17, Neg. LLF: -569.3542780400454
Iteration: 3, Func. Count: 25, Neg. LLF: -569.36130906661
Iteration: 4, Func. Count: 32, Neg. LLF: -569.3635056765388
Iteration: 5, Func. Count: 40, Neg. LLF: -569.3636090659845
Iteration: 6, Func. Count: 45, Neg. LLF: -569.3636240505718
Optimization terminated successfully. (Exit mode 0)
Current function value: -569.3636240505803
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -569.250328457035
Iteration: 2, Func. Count: 17, Neg. LLF: -569.6053181738945
Iteration: 3, Func. Count: 25, Neg. LLF: -569.6127712906548
Iteration: 4, Func. Count: 32, Neg. LLF: -569.6130653837597
Iteration: 5, Func. Count: 40, Neg. LLF: -569.613096186601
Iteration: 6, Func. Count: 45, Neg. LLF: -569.6130975866349
Optimization terminated successfully. (Exit mode 0)
Current function value: -569.6130975866377
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -568.3441728122765
Iteration: 2, Func. Count: 17, Neg. LLF: -568.6957459164007
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -568.695745910444
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -568.9057727544341
Iteration: 2, Func. Count: 17, Neg. LLF: -569.239365234208
Iteration: 3, Func. Count: 24, Neg. LLF: -569.2487547467714
Iteration: 4, Func. Count: 31, Neg. LLF: -569.25060025433
Iteration: 5, Func. Count: 39, Neg. LLF: -569.2509985500479
Iteration: 6, Func. Count: 44, Neg. LLF: -569.2510006931882
Optimization terminated successfully. (Exit mode 0)
Current function value: -569.2510006931723
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -566.6880396904183
Iteration: 2, Func. Count: 17, Neg. LLF: -566.9337398867572
Iteration: 3, Func. Count: 24, Neg. LLF: -566.9761431000226
Iteration: 4, Func. Count: 31, Neg. LLF: -566.980811348607
Iteration: 5, Func. Count: 39, Neg. LLF: -566.9854601147852
Iteration: 6, Func. Count: 46, Neg. LLF: -566.9858710893623
Iteration: 7, Func. Count: 51, Neg. LLF: -566.9862930114881
Iteration: 8, Func. Count: 56, Neg. LLF: -566.9863673211469
Optimization terminated successfully. (Exit mode 0)
Current function value: -566.986367321134
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -565.8068521610885
Iteration: 2, Func. Count: 17, Neg. LLF: -566.0950987539454
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -566.0950987256263
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -566.0935281475001
Iteration: 2, Func. Count: 17, Neg. LLF: -566.375292067804
Iteration: 3, Func. Count: 25, Neg. LLF: -566.386657381693
Iteration: 4, Func. Count: 33, Neg. LLF: -566.3866803144335
Iteration: 5, Func. Count: 41, Neg. LLF: -566.3867188115303
Optimization terminated successfully. (Exit mode 0)
Current function value: -566.3867188115285
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -565.5168446148195
Iteration: 2, Func. Count: 17, Neg. LLF: -565.8359886405899
Iteration: 3, Func. Count: 25, Neg. LLF: -565.8479994556201
Iteration: 4, Func. Count: 32, Neg. LLF: -565.848298471319
Iteration: 5, Func. Count: 40, Neg. LLF: -565.8484996859579
Optimization terminated successfully. (Exit mode 0)
Current function value: -565.8485005277278
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -565.2783040503982
Iteration: 2, Func. Count: 17, Neg. LLF: -565.5817601574906
Iteration: 3, Func. Count: 25, Neg. LLF: -565.5959200378504
Iteration: 4, Func. Count: 32, Neg. LLF: -565.5963353669476
Iteration: 5, Func. Count: 39, Neg. LLF: -565.598753869877
Iteration: 6, Func. Count: 45, Neg. LLF: -565.599097602265
Iteration: 7, Func. Count: 50, Neg. LLF: -565.599306764711
Optimization terminated successfully. (Exit mode 0)
Current function value: -565.5993067646989
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -562.1799305769234
Iteration: 2, Func. Count: 17, Neg. LLF: -562.3817652812027
Iteration: 3, Func. Count: 24, Neg. LLF: -562.403253622479
Iteration: 4, Func. Count: 30, Neg. LLF: -562.4103069418961
Iteration: 5, Func. Count: 38, Neg. LLF: -562.4105978639973
Iteration: 6, Func. Count: 45, Neg. LLF: -562.4107702351268
Iteration: 7, Func. Count: 50, Neg. LLF: -562.4109402446093
Optimization terminated successfully. (Exit mode 0)
Current function value: -562.4109402446368
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -562.318050018594
Iteration: 2, Func. Count: 17, Neg. LLF: -562.524302091353
Iteration: 3, Func. Count: 24, Neg. LLF: -562.5334486338666
Iteration: 4, Func. Count: 31, Neg. LLF: -562.5346141094991
Iteration: 5, Func. Count: 39, Neg. LLF: -562.5346411249496
Iteration: 6, Func. Count: 46, Neg. LLF: -562.5351244549539
Iteration: 7, Func. Count: 51, Neg. LLF: -562.5351650733135
Optimization terminated successfully. (Exit mode 0)
Current function value: -562.5351650733237
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -560.4630017659033
Iteration: 2, Func. Count: 17, Neg. LLF: -560.6510931191938
Iteration: 3, Func. Count: 25, Neg. LLF: -560.6555004680636
Iteration: 4, Func. Count: 33, Neg. LLF: -560.655502898886
Iteration: 5, Func. Count: 41, Neg. LLF: -560.6555687347802
Optimization terminated successfully. (Exit mode 0)
Current function value: -560.6555687347578
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -560.082019633845
Iteration: 2, Func. Count: 17, Neg. LLF: -560.2424402466436
Iteration: 3, Func. Count: 25, Neg. LLF: -560.2494289820647
Iteration: 4, Func. Count: 32, Neg. LLF: -560.2540907444213
Iteration: 5, Func. Count: 40, Neg. LLF: -560.2546586023207
Iteration: 6, Func. Count: 46, Neg. LLF: -560.2546969513766
Optimization terminated successfully. (Exit mode 0)
Current function value: -560.2546974550877
Iterations: 6
Function evaluations: 47
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -559.9995950888697
Iteration: 2, Func. Count: 16, Neg. LLF: -560.0611797868069
Iteration: 3, Func. Count: 24, Neg. LLF: -560.0715037843283
Iteration: 4, Func. Count: 30, Neg. LLF: -560.1437470050543
Iteration: 5, Func. Count: 37, Neg. LLF: -560.1827498013434
Iteration: 6, Func. Count: 44, Neg. LLF: -560.1845314658078
Iteration: 7, Func. Count: 49, Neg. LLF: -560.184575996595
Iteration: 8, Func. Count: 54, Neg. LLF: -560.1845892886249
Optimization terminated successfully. (Exit mode 0)
Current function value: -560.1845892886249
Iterations: 8
Function evaluations: 54
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -558.0877261077044
Iteration: 2, Func. Count: 16, Neg. LLF: -558.155523351129
Iteration: 3, Func. Count: 23, Neg. LLF: -558.1703832781293
Iteration: 4, Func. Count: 31, Neg. LLF: -558.1706727806634
Iteration: 5, Func. Count: 40, Neg. LLF: -558.2306111198978
Iteration: 6, Func. Count: 48, Neg. LLF: -558.2306541774824
Iteration: 7, Func. Count: 53, Neg. LLF: -558.230675454392
Optimization terminated successfully. (Exit mode 0)
Current function value: -558.2306754544106
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -557.3157400142534
Iteration: 2, Func. Count: 16, Neg. LLF: -557.3963929846735
Iteration: 3, Func. Count: 23, Neg. LLF: -557.4048333641763
Iteration: 4, Func. Count: 30, Neg. LLF: -557.405645098384
Iteration: 5, Func. Count: 37, Neg. LLF: -557.4683419894202
Iteration: 6, Func. Count: 44, Neg. LLF: -557.4685114855256
Iteration: 7, Func. Count: 51, Neg. LLF: -557.4699681474799
Iteration: 8, Func. Count: 56, Neg. LLF: -557.4699735688029
Optimization terminated successfully. (Exit mode 0)
Current function value: -557.4699735688114
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -557.2210145601423
Iteration: 2, Func. Count: 16, Neg. LLF: -557.298649961547
Iteration: 3, Func. Count: 23, Neg. LLF: -557.3030858619909
Iteration: 4, Func. Count: 30, Neg. LLF: -557.315677780553
Iteration: 5, Func. Count: 37, Neg. LLF: -557.3645539408983
Iteration: 6, Func. Count: 44, Neg. LLF: -557.365620256136
Iteration: 7, Func. Count: 50, Neg. LLF: -557.3656781247702
Iteration: 8, Func. Count: 55, Neg. LLF: -557.3657143482421
Optimization terminated successfully. (Exit mode 0)
Current function value: -557.3657143481648
Iterations: 8
Function evaluations: 55
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -557.8069280453433
Iteration: 2, Func. Count: 16, Neg. LLF: -557.887805142967
Iteration: 3, Func. Count: 23, Neg. LLF: -557.8994689511001
Iteration: 4, Func. Count: 30, Neg. LLF: -557.9010527196131
Iteration: 5, Func. Count: 37, Neg. LLF: -557.9635435078576
Iteration: 6, Func. Count: 44, Neg. LLF: -557.9639646107638
Iteration: 7, Func. Count: 50, Neg. LLF: -557.9643031709604
Iteration: 8, Func. Count: 55, Neg. LLF: -557.9644438465156
Optimization terminated successfully. (Exit mode 0)
Current function value: -557.9644438465184
Iterations: 8
Function evaluations: 55
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -557.5126672792776
Iteration: 2, Func. Count: 16, Neg. LLF: -557.5860775874958
Iteration: 3, Func. Count: 23, Neg. LLF: -557.5929968695359
Iteration: 4, Func. Count: 30, Neg. LLF: -557.595224798112
Iteration: 5, Func. Count: 37, Neg. LLF: -557.6489223862613
Iteration: 6, Func. Count: 45, Neg. LLF: -557.6490801010316
Iteration: 7, Func. Count: 51, Neg. LLF: -557.6513305699206
Iteration: 8, Func. Count: 56, Neg. LLF: -557.651443867077
Optimization terminated successfully. (Exit mode 0)
Current function value: -557.6514438670907
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -555.7602402953947
Iteration: 2, Func. Count: 16, Neg. LLF: -555.8390118085501
Iteration: 3, Func. Count: 23, Neg. LLF: -555.856928777029
Iteration: 4, Func. Count: 30, Neg. LLF: -555.8583977869007
Iteration: 5, Func. Count: 36, Neg. LLF: -555.8782934424581
Iteration: 6, Func. Count: 43, Neg. LLF: -555.9006398192435
Iteration: 7, Func. Count: 50, Neg. LLF: -555.9039817644309
Iteration: 8, Func. Count: 55, Neg. LLF: -555.9157009573528
Iteration: 9, Func. Count: 60, Neg. LLF: -555.9159385390303
Optimization terminated successfully. (Exit mode 0)
Current function value: -555.915938539069
Iterations: 9
Function evaluations: 60
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -554.9206039057533
Iteration: 2, Func. Count: 16, Neg. LLF: -554.9863368070743
Iteration: 3, Func. Count: 23, Neg. LLF: -555.0145259189865
Iteration: 4, Func. Count: 31, Neg. LLF: -555.0154834960958
Iteration: 5, Func. Count: 38, Neg. LLF: -555.0565799429902
Iteration: 6, Func. Count: 46, Neg. LLF: -555.0568128533733
Iteration: 7, Func. Count: 51, Neg. LLF: -555.0572577829284
Iteration: 8, Func. Count: 56, Neg. LLF: -555.0573568946206
Optimization terminated successfully. (Exit mode 0)
Current function value: -555.0573568946234
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -553.8981858658453
Iteration: 2, Func. Count: 16, Neg. LLF: -553.9510343749207
Iteration: 3, Func. Count: 23, Neg. LLF: -553.9847785143247
Iteration: 4, Func. Count: 31, Neg. LLF: -553.9851108933306
Iteration: 5, Func. Count: 39, Neg. LLF: -554.0126284675438
Iteration: 6, Func. Count: 46, Neg. LLF: -554.0128931100276
Iteration: 7, Func. Count: 51, Neg. LLF: -554.0129947806599
Iteration: 8, Func. Count: 56, Neg. LLF: -554.0130043692166
Optimization terminated successfully. (Exit mode 0)
Current function value: -554.0130043692175
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -553.5863873976484
Iteration: 2, Func. Count: 16, Neg. LLF: -553.6465158040194
Iteration: 3, Func. Count: 23, Neg. LLF: -553.6517036151057
Iteration: 4, Func. Count: 30, Neg. LLF: -553.6630794505863
Iteration: 5, Func. Count: 37, Neg. LLF: -553.6914749854859
Iteration: 6, Func. Count: 45, Neg. LLF: -553.6915008893118
Iteration: 7, Func. Count: 50, Neg. LLF: -553.6915184707937
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.6915184708535
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -553.6446001248441
Iteration: 2, Func. Count: 16, Neg. LLF: -553.7042817460938
Iteration: 3, Func. Count: 24, Neg. LLF: -553.704431430659
Iteration: 4, Func. Count: 31, Neg. LLF: -553.7213708517547
Iteration: 5, Func. Count: 39, Neg. LLF: -553.7215969703575
Iteration: 6, Func. Count: 46, Neg. LLF: -553.7515716426165
Iteration: 7, Func. Count: 53, Neg. LLF: -553.7515738354195
Iteration: 8, Func. Count: 58, Neg. LLF: -553.7516044369152
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.7516044368699
Iterations: 8
Function evaluations: 58
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -552.9761041479021
Iteration: 2, Func. Count: 16, Neg. LLF: -553.0444307531627
Iteration: 3, Func. Count: 23, Neg. LLF: -553.0537464311574
Iteration: 4, Func. Count: 31, Neg. LLF: -553.0546924346471
Iteration: 5, Func. Count: 38, Neg. LLF: -553.0833795178874
Iteration: 6, Func. Count: 45, Neg. LLF: -553.0875818589536
Iteration: 7, Func. Count: 51, Neg. LLF: -553.0878327880718
Iteration: 8, Func. Count: 56, Neg. LLF: -553.0879148407685
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.0879153909998
Iterations: 8
Function evaluations: 57
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -552.2604592187101
Iteration: 2, Func. Count: 16, Neg. LLF: -552.337927931454
Iteration: 3, Func. Count: 23, Neg. LLF: -552.3439375054012
Iteration: 4, Func. Count: 30, Neg. LLF: -552.3452900541608
Iteration: 5, Func. Count: 37, Neg. LLF: -552.3747506735335
Iteration: 6, Func. Count: 44, Neg. LLF: -552.3750090961184
Iteration: 7, Func. Count: 51, Neg. LLF: -552.3827683407163
Iteration: 8, Func. Count: 56, Neg. LLF: -552.3827700388268
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.382770038824
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -552.2358564619706
Iteration: 2, Func. Count: 16, Neg. LLF: -552.3098161286655
Iteration: 3, Func. Count: 23, Neg. LLF: -552.3124527897595
Iteration: 4, Func. Count: 30, Neg. LLF: -552.3244412349077
Iteration: 5, Func. Count: 37, Neg. LLF: -552.3490550721544
Iteration: 6, Func. Count: 44, Neg. LLF: -552.3496223994248
Iteration: 7, Func. Count: 49, Neg. LLF: -552.3496248365128
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.3496248365204
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -551.6385241898963
Iteration: 2, Func. Count: 16, Neg. LLF: -551.7099722228425
Iteration: 3, Func. Count: 23, Neg. LLF: -551.712914392526
Iteration: 4, Func. Count: 30, Neg. LLF: -551.7161525504906
Iteration: 5, Func. Count: 37, Neg. LLF: -551.7460063409054
Iteration: 6, Func. Count: 44, Neg. LLF: -551.7463054554335
Iteration: 7, Func. Count: 50, Neg. LLF: -551.746415515518
Optimization terminated successfully. (Exit mode 0)
Current function value: -551.7464160022939
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -552.203785943804
Iteration: 2, Func. Count: 16, Neg. LLF: -552.2774438180445
Iteration: 3, Func. Count: 23, Neg. LLF: -552.2807958279235
Iteration: 4, Func. Count: 30, Neg. LLF: -552.2902554101203
Iteration: 5, Func. Count: 37, Neg. LLF: -552.3165281814443
Iteration: 6, Func. Count: 44, Neg. LLF: -552.3170752469096
Iteration: 7, Func. Count: 50, Neg. LLF: -552.3171053860858
Iteration: 8, Func. Count: 55, Neg. LLF: -552.3171119400424
Iteration: 9, Func. Count: 60, Neg. LLF: -552.3171131267379
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.3171131267377
Iterations: 9
Function evaluations: 60
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -551.9795166628683
Iteration: 2, Func. Count: 16, Neg. LLF: -552.0526070162671
Iteration: 3, Func. Count: 23, Neg. LLF: -552.0547368798735
Iteration: 4, Func. Count: 30, Neg. LLF: -552.0621616177416
Iteration: 5, Func. Count: 37, Neg. LLF: -552.0893895871902
Iteration: 6, Func. Count: 44, Neg. LLF: -552.0899958370803
Iteration: 7, Func. Count: 49, Neg. LLF: -552.0900020771239
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.0900020771489
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -549.8881931368106
Iteration: 2, Func. Count: 16, Neg. LLF: -549.9799885527123
Iteration: 3, Func. Count: 25, Neg. LLF: -549.9799997260095
Iteration: 4, Func. Count: 32, Neg. LLF: -549.9844645376647
Iteration: 5, Func. Count: 40, Neg. LLF: -550.0220404799186
Iteration: 6, Func. Count: 47, Neg. LLF: -550.0223999765631
Iteration: 7, Func. Count: 52, Neg. LLF: -550.0224307949202
Optimization terminated successfully. (Exit mode 0)
Current function value: -550.0224307949261
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -549.5516595309466
Iteration: 2, Func. Count: 16, Neg. LLF: -549.6459358372641
Iteration: 3, Func. Count: 25, Neg. LLF: -549.6462234744542
Iteration: 4, Func. Count: 31, Neg. LLF: -549.655352014207
Iteration: 5, Func. Count: 38, Neg. LLF: -549.6828183159089
Iteration: 6, Func. Count: 45, Neg. LLF: -549.6865399170309
Iteration: 7, Func. Count: 50, Neg. LLF: -549.6866337947449
Optimization terminated successfully. (Exit mode 0)
Current function value: -549.6866337947438
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -549.6080061747916
Iteration: 2, Func. Count: 16, Neg. LLF: -549.7023781273581
Iteration: 3, Func. Count: 25, Neg. LLF: -549.7026581881835
Iteration: 4, Func. Count: 31, Neg. LLF: -549.7119376993758
Iteration: 5, Func. Count: 38, Neg. LLF: -549.7393897097634
Iteration: 6, Func. Count: 45, Neg. LLF: -549.7430618049783
Iteration: 7, Func. Count: 50, Neg. LLF: -549.74315515381
Optimization terminated successfully. (Exit mode 0)
Current function value: -549.7431551538091
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -549.637244349138
Iteration: 2, Func. Count: 16, Neg. LLF: -549.7299875804406
Iteration: 3, Func. Count: 25, Neg. LLF: -549.7300195793914
Iteration: 4, Func. Count: 31, Neg. LLF: -549.735774301206
Iteration: 5, Func. Count: 38, Neg. LLF: -549.7634742333016
Iteration: 6, Func. Count: 45, Neg. LLF: -549.768970192127
Iteration: 7, Func. Count: 50, Neg. LLF: -549.7690576072407
Optimization terminated successfully. (Exit mode 0)
Current function value: -549.7690576072434
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -550.1523261354009
Iteration: 2, Func. Count: 16, Neg. LLF: -550.243259321227
Iteration: 3, Func. Count: 24, Neg. LLF: -550.2434467583294
Iteration: 4, Func. Count: 33, Neg. LLF: -550.2783149552203
Iteration: 5, Func. Count: 41, Neg. LLF: -550.2783590149863
Iteration: 6, Func. Count: 47, Neg. LLF: -550.2825543166123
Optimization terminated successfully. (Exit mode 0)
Current function value: -550.2825545367351
Iterations: 6
Function evaluations: 48
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -549.6795514398597
Iteration: 2, Func. Count: 16, Neg. LLF: -549.7673254752799
Iteration: 3, Func. Count: 25, Neg. LLF: -549.7675429189454
Iteration: 4, Func. Count: 31, Neg. LLF: -549.7844951737826
Iteration: 5, Func. Count: 38, Neg. LLF: -549.7968691958689
Iteration: 6, Func. Count: 44, Neg. LLF: -549.8034071298455
Iteration: 7, Func. Count: 49, Neg. LLF: -549.8039800089196
Iteration: 8, Func. Count: 54, Neg. LLF: -549.8039830571031
Optimization terminated successfully. (Exit mode 0)
Current function value: -549.8039830570931
Iterations: 8
Function evaluations: 54
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -549.600377164091
Iteration: 2, Func. Count: 16, Neg. LLF: -549.6792805414555
Iteration: 3, Func. Count: 25, Neg. LLF: -549.6797850216253
Iteration: 4, Func. Count: 31, Neg. LLF: -549.6876762642613
Iteration: 5, Func. Count: 38, Neg. LLF: -549.7109111329985
Iteration: 6, Func. Count: 45, Neg. LLF: -549.7146818269066
Iteration: 7, Func. Count: 50, Neg. LLF: -549.7147899261504
Optimization terminated successfully. (Exit mode 0)
Current function value: -549.7147899261765
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -549.6076503938641
Iteration: 2, Func. Count: 16, Neg. LLF: -549.693260211877
Iteration: 3, Func. Count: 25, Neg. LLF: -549.6935565449648
Iteration: 4, Func. Count: 31, Neg. LLF: -549.7011417122653
Iteration: 5, Func. Count: 38, Neg. LLF: -549.726506017698
Iteration: 6, Func. Count: 45, Neg. LLF: -549.7305387582122
Iteration: 7, Func. Count: 50, Neg. LLF: -549.7306391216737
Optimization terminated successfully. (Exit mode 0)
Current function value: -549.7306391216762
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -548.6724855760544
Iteration: 2, Func. Count: 16, Neg. LLF: -548.7667726628375
Iteration: 3, Func. Count: 24, Neg. LLF: -548.7691091246163
Iteration: 4, Func. Count: 30, Neg. LLF: -548.7922678319167
Iteration: 5, Func. Count: 37, Neg. LLF: -548.8097749651704
Iteration: 6, Func. Count: 44, Neg. LLF: -548.8102652397988
Iteration: 7, Func. Count: 49, Neg. LLF: -548.8104323322236
Optimization terminated successfully. (Exit mode 0)
Current function value: -548.8104323322752
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -548.4725234755069
Iteration: 2, Func. Count: 16, Neg. LLF: -548.5619443684733
Iteration: 3, Func. Count: 27, Neg. LLF: -548.5808856770955
Iteration: 4, Func. Count: 38, Neg. LLF: -548.5848815882161
Iteration: 5, Func. Count: 49, Neg. LLF: -548.5857156693435
Iteration: 6, Func. Count: 60, Neg. LLF: -548.5858854851964
Iteration: 7, Func. Count: 67, Neg. LLF: -548.5944191339624
Iteration: 8, Func. Count: 74, Neg. LLF: -548.6204408719582
Iteration: 9, Func. Count: 81, Neg. LLF: -548.6359101811026
Iteration: 10, Func. Count: 88, Neg. LLF: -548.6360135742757
Iteration: 11, Func. Count: 95, Neg. LLF: -548.6360606797981
Iteration: 12, Func. Count: 100, Neg. LLF: -548.6360882942472
Optimization terminated successfully. (Exit mode 0)
Current function value: -548.6360882942322
Iterations: 16
Function evaluations: 100
Gradient evaluations: 12
Iteration: 1, Func. Count: 5, Neg. LLF: -548.8766609104271
Iteration: 2, Func. Count: 16, Neg. LLF: -548.9908678299505
Iteration: 3, Func. Count: 24, Neg. LLF: -548.991119257345
Iteration: 4, Func. Count: 31, Neg. LLF: -549.00697356316
Iteration: 5, Func. Count: 38, Neg. LLF: -549.0358847738105
Iteration: 6, Func. Count: 45, Neg. LLF: -549.0365409326375
Optimization terminated successfully. (Exit mode 0)
Current function value: -549.0365416802699
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -550.8877665481039
Iteration: 2, Func. Count: 16, Neg. LLF: -551.0047275502545
Iteration: 3, Func. Count: 23, Neg. LLF: -551.0144820470773
Iteration: 4, Func. Count: 30, Neg. LLF: -551.0159295951171
Iteration: 5, Func. Count: 37, Neg. LLF: -551.0659152613401
Optimization terminated successfully. (Exit mode 0)
Current function value: -551.0659154504988
Iterations: 5
Function evaluations: 42
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -550.7578508163328
Iteration: 2, Func. Count: 16, Neg. LLF: -550.8742836610897
Iteration: 3, Func. Count: 23, Neg. LLF: -550.8855580296336
Iteration: 4, Func. Count: 30, Neg. LLF: -550.8863187969367
Iteration: 5, Func. Count: 38, Neg. LLF: -550.9356434678286
Iteration: 6, Func. Count: 45, Neg. LLF: -550.9359483115264
Iteration: 7, Func. Count: 51, Neg. LLF: -550.9366173049601
Iteration: 8, Func. Count: 56, Neg. LLF: -550.9366186724735
Optimization terminated successfully. (Exit mode 0)
Current function value: -550.9366186724743
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -550.9515614490756
Iteration: 2, Func. Count: 16, Neg. LLF: -551.0691284852511
Iteration: 3, Func. Count: 23, Neg. LLF: -551.0765009406321
Iteration: 4, Func. Count: 30, Neg. LLF: -551.0832421952086
Iteration: 5, Func. Count: 37, Neg. LLF: -551.1326075379637
Iteration: 6, Func. Count: 46, Neg. LLF: -551.1326093956532
Iteration: 7, Func. Count: 51, Neg. LLF: -551.1326126491781
Optimization terminated successfully. (Exit mode 0)
Current function value: -551.1326126491767
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -550.942274360094
Iteration: 2, Func. Count: 16, Neg. LLF: -551.0655295010224
Iteration: 3, Func. Count: 23, Neg. LLF: -551.0769049440621
Iteration: 4, Func. Count: 30, Neg. LLF: -551.078166659556
Iteration: 5, Func. Count: 37, Neg. LLF: -551.1299810745766
Iteration: 6, Func. Count: 44, Neg. LLF: -551.1302715732184
Iteration: 7, Func. Count: 51, Neg. LLF: -551.1315657847028
Iteration: 8, Func. Count: 56, Neg. LLF: -551.1315668908046
Optimization terminated successfully. (Exit mode 0)
Current function value: -551.1315668908001
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -551.0212669841159
Iteration: 2, Func. Count: 16, Neg. LLF: -551.1425598256355
Iteration: 3, Func. Count: 23, Neg. LLF: -551.1570781795663
Iteration: 4, Func. Count: 30, Neg. LLF: -551.1582570425421
Iteration: 5, Func. Count: 37, Neg. LLF: -551.1858681647263
Iteration: 6, Func. Count: 44, Neg. LLF: -551.1880224015971
Iteration: 7, Func. Count: 52, Neg. LLF: -551.2107224661231
Iteration: 8, Func. Count: 57, Neg. LLF: -551.2107389889377
Optimization terminated successfully. (Exit mode 0)
Current function value: -551.2107389889221
Iterations: 8
Function evaluations: 57
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -551.1144303768399
Iteration: 2, Func. Count: 16, Neg. LLF: -551.2501818816652
Iteration: 3, Func. Count: 23, Neg. LLF: -551.2793252035054
Iteration: 4, Func. Count: 34, Neg. LLF: -551.3175969805936
Iteration: 5, Func. Count: 41, Neg. LLF: -551.3238312820604
Iteration: 6, Func. Count: 48, Neg. LLF: -551.3245202530812
Iteration: 7, Func. Count: 56, Neg. LLF: -551.3417588222339
Optimization terminated successfully. (Exit mode 0)
Current function value: -551.3417594327207
Iterations: 8
Function evaluations: 61
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -552.516240371001
Iteration: 2, Func. Count: 16, Neg. LLF: -552.6495334970448
Iteration: 3, Func. Count: 23, Neg. LLF: -552.6957113742992
Iteration: 4, Func. Count: 31, Neg. LLF: -552.6958484235438
Iteration: 5, Func. Count: 39, Neg. LLF: -552.7650048104286
Iteration: 6, Func. Count: 47, Neg. LLF: -552.7650732435336
Iteration: 7, Func. Count: 52, Neg. LLF: -552.7650891464551
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.7650891464361
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -552.7079831932369
Iteration: 2, Func. Count: 16, Neg. LLF: -552.8378041152273
Iteration: 3, Func. Count: 24, Neg. LLF: -552.8429553088831
Iteration: 4, Func. Count: 30, Neg. LLF: -552.9164281893006
Iteration: 5, Func. Count: 37, Neg. LLF: -552.922950635058
Iteration: 6, Func. Count: 44, Neg. LLF: -552.9367373827288
Iteration: 7, Func. Count: 49, Neg. LLF: -552.9368363657204
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.9368363658073
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -553.015382636456
Iteration: 2, Func. Count: 16, Neg. LLF: -553.1511304237913
Iteration: 3, Func. Count: 24, Neg. LLF: -553.1570286007009
Iteration: 4, Func. Count: 32, Neg. LLF: -553.260787352443
Iteration: 5, Func. Count: 39, Neg. LLF: -553.2619909723167
Iteration: 6, Func. Count: 44, Neg. LLF: -553.2619943447166
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.2619943447069
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -553.4250880211115
Iteration: 2, Func. Count: 16, Neg. LLF: -553.5620972947038
Iteration: 3, Func. Count: 24, Neg. LLF: -553.5674563015705
Iteration: 4, Func. Count: 31, Neg. LLF: -553.6770203354987
Iteration: 5, Func. Count: 38, Neg. LLF: -553.6774634039613
Iteration: 6, Func. Count: 43, Neg. LLF: -553.6777023983643
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.6777023984075
Iterations: 6
Function evaluations: 43
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -553.4277923386439
Iteration: 2, Func. Count: 16, Neg. LLF: -553.5652309936519
Iteration: 3, Func. Count: 24, Neg. LLF: -553.5713208642001
Iteration: 4, Func. Count: 30, Neg. LLF: -553.6690336529025
Iteration: 5, Func. Count: 37, Neg. LLF: -553.6780424954201
Iteration: 6, Func. Count: 44, Neg. LLF: -553.6832957880031
Iteration: 7, Func. Count: 49, Neg. LLF: -553.683563897358
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.6835642651957
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -553.2980408609042
Iteration: 2, Func. Count: 16, Neg. LLF: -553.4297277377202
Iteration: 3, Func. Count: 24, Neg. LLF: -553.4350443105496
Iteration: 4, Func. Count: 31, Neg. LLF: -553.5412935277013
Iteration: 5, Func. Count: 39, Neg. LLF: -553.5413062851169
Iteration: 6, Func. Count: 44, Neg. LLF: -553.5413078092183
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.5413078091863
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -553.4628910628342
Iteration: 2, Func. Count: 16, Neg. LLF: -553.5944469315345
Iteration: 3, Func. Count: 24, Neg. LLF: -553.6004421579677
Iteration: 4, Func. Count: 30, Neg. LLF: -553.6804122757248
Iteration: 5, Func. Count: 37, Neg. LLF: -553.6854063019102
Iteration: 6, Func. Count: 44, Neg. LLF: -553.710967521544
Iteration: 7, Func. Count: 49, Neg. LLF: -553.711047611039
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.7110476111098
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -552.8844688283752
Iteration: 2, Func. Count: 16, Neg. LLF: -553.0106875573202
Iteration: 3, Func. Count: 23, Neg. LLF: -553.0425908916145
Iteration: 4, Func. Count: 30, Neg. LLF: -553.0520647105745
Iteration: 5, Func. Count: 37, Neg. LLF: -553.079673370321
Iteration: 6, Func. Count: 44, Neg. LLF: -553.0954792930202
Iteration: 7, Func. Count: 51, Neg. LLF: -553.1313879426077
Iteration: 8, Func. Count: 60, Neg. LLF: -553.1313911438804
Iteration: 9, Func. Count: 65, Neg. LLF: -553.1327087856636
Iteration: 10, Func. Count: 70, Neg. LLF: -553.1328056457501
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.1328056457571
Iterations: 10
Function evaluations: 70
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -552.3495194032007
Iteration: 2, Func. Count: 16, Neg. LLF: -552.4747503162455
Iteration: 3, Func. Count: 27, Neg. LLF: -552.517740613609
Iteration: 4, Func. Count: 38, Neg. LLF: -552.532245071915
Iteration: 5, Func. Count: 49, Neg. LLF: -552.5371962761046
Iteration: 6, Func. Count: 57, Neg. LLF: -552.5374497322614
Iteration: 7, Func. Count: 64, Neg. LLF: -552.5866632120137
Iteration: 8, Func. Count: 73, Neg. LLF: -552.586690166991
Iteration: 9, Func. Count: 80, Neg. LLF: -552.586692497855
Iteration: 10, Func. Count: 86, Neg. LLF: -552.5867260390474
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.5867260390519
Iterations: 13
Function evaluations: 86
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -551.7108584620273
Iteration: 2, Func. Count: 16, Neg. LLF: -551.8413123828575
Iteration: 3, Func. Count: 25, Neg. LLF: -551.8413232785934
Iteration: 4, Func. Count: 36, Neg. LLF: -551.8858747622128
Iteration: 5, Func. Count: 44, Neg. LLF: -551.8871246443819
Iteration: 6, Func. Count: 51, Neg. LLF: -551.9332990595082
Iteration: 7, Func. Count: 59, Neg. LLF: -551.9388564228516
Iteration: 8, Func. Count: 66, Neg. LLF: -551.9418476706205
Iteration: 9, Func. Count: 73, Neg. LLF: -551.945038319437
Iteration: 10, Func. Count: 78, Neg. LLF: -551.9451081465115
Optimization terminated successfully. (Exit mode 0)
Current function value: -551.9451081464863
Iterations: 11
Function evaluations: 78
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -551.599731077632
Iteration: 2, Func. Count: 16, Neg. LLF: -551.7271202911973
Iteration: 3, Func. Count: 27, Neg. LLF: -551.7704983681109
Iteration: 4, Func. Count: 38, Neg. LLF: -551.7850756361463
Iteration: 5, Func. Count: 49, Neg. LLF: -551.7900198322066
Iteration: 6, Func. Count: 56, Neg. LLF: -551.8154339784077
Iteration: 7, Func. Count: 64, Neg. LLF: -551.8154555904257
Iteration: 8, Func. Count: 72, Neg. LLF: -551.8175993905165
Iteration: 9, Func. Count: 79, Neg. LLF: -551.8176302848888
Iteration: 10, Func. Count: 84, Neg. LLF: -551.8176372651545
Optimization terminated successfully. (Exit mode 0)
Current function value: -551.8176372651506
Iterations: 13
Function evaluations: 84
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -551.8311125179523
Iteration: 2, Func. Count: 16, Neg. LLF: -551.9611839704481
Iteration: 3, Func. Count: 27, Neg. LLF: -552.0069116162811
Iteration: 4, Func. Count: 34, Neg. LLF: -552.0094314924451
Iteration: 5, Func. Count: 45, Neg. LLF: -552.0250127487942
Iteration: 6, Func. Count: 52, Neg. LLF: -552.0494234026628
Iteration: 7, Func. Count: 59, Neg. LLF: -552.0631130769773
Iteration: 8, Func. Count: 66, Neg. LLF: -552.0721472302275
Iteration: 9, Func. Count: 74, Neg. LLF: -552.0721662322534
Iteration: 10, Func. Count: 79, Neg. LLF: -552.0722979016818
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.072298376863
Iterations: 12
Function evaluations: 80
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -552.0112964116329
Iteration: 2, Func. Count: 16, Neg. LLF: -552.1414206483687
Iteration: 3, Func. Count: 24, Neg. LLF: -552.1415815531204
Iteration: 4, Func. Count: 31, Neg. LLF: -552.1984280355113
Iteration: 5, Func. Count: 39, Neg. LLF: -552.257751235085
Iteration: 6, Func. Count: 46, Neg. LLF: -552.2585459450378
Iteration: 7, Func. Count: 52, Neg. LLF: -552.2606068776374
Iteration: 8, Func. Count: 57, Neg. LLF: -552.26095793321
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.2609579332242
Iterations: 8
Function evaluations: 57
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -551.8993577900403
Iteration: 2, Func. Count: 16, Neg. LLF: -552.0231708183342
Iteration: 3, Func. Count: 24, Neg. LLF: -552.0321744867003
Iteration: 4, Func. Count: 30, Neg. LLF: -552.1241140858807
Iteration: 5, Func. Count: 37, Neg. LLF: -552.1278863970908
Iteration: 6, Func. Count: 44, Neg. LLF: -552.1324962717674
Iteration: 7, Func. Count: 49, Neg. LLF: -552.1332528115381
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.1332528116563
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -552.4117009327672
Iteration: 2, Func. Count: 16, Neg. LLF: -552.537976980971
Iteration: 3, Func. Count: 27, Neg. LLF: -552.5833308832966
Iteration: 4, Func. Count: 34, Neg. LLF: -552.5862148398522
Iteration: 5, Func. Count: 41, Neg. LLF: -552.64591269323
Iteration: 6, Func. Count: 48, Neg. LLF: -552.659288380615
Iteration: 7, Func. Count: 56, Neg. LLF: -552.659349773659
Iteration: 8, Func. Count: 62, Neg. LLF: -552.6600537111004
Iteration: 9, Func. Count: 67, Neg. LLF: -552.660082899678
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.6600828996808
Iterations: 10
Function evaluations: 67
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -552.5803378331896
Iteration: 2, Func. Count: 16, Neg. LLF: -552.7046834866734
Iteration: 3, Func. Count: 23, Neg. LLF: -552.7131570824199
Iteration: 4, Func. Count: 30, Neg. LLF: -552.7149473651259
Iteration: 5, Func. Count: 37, Neg. LLF: -552.8153703109905
Iteration: 6, Func. Count: 44, Neg. LLF: -552.8166805855832
Iteration: 7, Func. Count: 51, Neg. LLF: -552.8256278276779
Iteration: 8, Func. Count: 57, Neg. LLF: -552.8277497758602
Iteration: 9, Func. Count: 62, Neg. LLF: -552.82794767363
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.8279483344697
Iterations: 9
Function evaluations: 63
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -552.3639954142066
Iteration: 2, Func. Count: 16, Neg. LLF: -552.491846406138
Iteration: 3, Func. Count: 27, Neg. LLF: -552.5379137984573
Iteration: 4, Func. Count: 38, Neg. LLF: -552.5543086751743
Iteration: 5, Func. Count: 45, Neg. LLF: -552.5972639266423
Iteration: 6, Func. Count: 53, Neg. LLF: -552.597521337676
Iteration: 7, Func. Count: 61, Neg. LLF: -552.6114977984855
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.6114985371535
Iterations: 9
Function evaluations: 65
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -552.3106642837809
Iteration: 2, Func. Count: 16, Neg. LLF: -552.4474706984454
Iteration: 3, Func. Count: 27, Neg. LLF: -552.4963256961579
Iteration: 4, Func. Count: 38, Neg. LLF: -552.5135775783258
Iteration: 5, Func. Count: 49, Neg. LLF: -552.5197293465635
Iteration: 6, Func. Count: 56, Neg. LLF: -552.5681517444785
Iteration: 7, Func. Count: 64, Neg. LLF: -552.568157598342
Iteration: 8, Func. Count: 72, Neg. LLF: -552.5684973801915
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.5684980143661
Iterations: 11
Function evaluations: 76
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -552.3707729631201
Iteration: 2, Func. Count: 16, Neg. LLF: -552.5121067716477
Iteration: 3, Func. Count: 27, Neg. LLF: -552.5628229145104
Iteration: 4, Func. Count: 38, Neg. LLF: -552.580844729897
Iteration: 5, Func. Count: 49, Neg. LLF: -552.5872865844202
Iteration: 6, Func. Count: 60, Neg. LLF: -552.5895936179268
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -552.5895935984868
Iterations: 10
Function evaluations: 60
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -552.2280046089668
Iteration: 2, Func. Count: 16, Neg. LLF: -552.3647236186424
Iteration: 3, Func. Count: 27, Neg. LLF: -552.4134048454359
Iteration: 4, Func. Count: 38, Neg. LLF: -552.4305808021525
Iteration: 5, Func. Count: 49, Neg. LLF: -552.4366893540689
Iteration: 6, Func. Count: 57, Neg. LLF: -552.4369947025957
Iteration: 7, Func. Count: 64, Neg. LLF: -552.4812670584749
Iteration: 8, Func. Count: 73, Neg. LLF: -552.4812759310497
Iteration: 9, Func. Count: 80, Neg. LLF: -552.4823743923943
Iteration: 10, Func. Count: 85, Neg. LLF: -552.4823882259316
Iteration: 11, Func. Count: 90, Neg. LLF: -552.4823902082368
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.4823902082335
Iterations: 14
Function evaluations: 90
Gradient evaluations: 11
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -552.3274451960735
Iteration: 2, Func. Count: 16, Neg. LLF: -552.4717807963211
Iteration: 3, Func. Count: 27, Neg. LLF: -552.5229255103776
Iteration: 4, Func. Count: 38, Neg. LLF: -552.5408571337557
Iteration: 5, Func. Count: 45, Neg. LLF: -552.5794478174676
Iteration: 6, Func. Count: 52, Neg. LLF: -552.5863584681163
Iteration: 7, Func. Count: 59, Neg. LLF: -552.6047324284825
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.6047324415806
Iterations: 9
Function evaluations: 66
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -552.6375217080597
Iteration: 2, Func. Count: 16, Neg. LLF: -552.7794867567188
Iteration: 3, Func. Count: 27, Neg. LLF: -552.8310220204521
Iteration: 4, Func. Count: 38, Neg. LLF: -552.8495223246325
Iteration: 5, Func. Count: 49, Neg. LLF: -552.8562148641006
Iteration: 6, Func. Count: 57, Neg. LLF: -552.8563803903306
Iteration: 7, Func. Count: 68, Neg. LLF: -552.8588000390863
Iteration: 8, Func. Count: 75, Neg. LLF: -552.9130308475799
Iteration: 9, Func. Count: 82, Neg. LLF: -552.9141795488924
Iteration: 10, Func. Count: 89, Neg. LLF: -552.9190357760899
Iteration: 11, Func. Count: 97, Neg. LLF: -552.9190778053952
Iteration: 12, Func. Count: 103, Neg. LLF: -552.9201065764378
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.9201069538124
Iterations: 16
Function evaluations: 105
Gradient evaluations: 12
Iteration: 1, Func. Count: 5, Neg. LLF: -553.0026170275967
Iteration: 2, Func. Count: 16, Neg. LLF: -553.1207716714919
Iteration: 3, Func. Count: 24, Neg. LLF: -553.1215278288204
Iteration: 4, Func. Count: 32, Neg. LLF: -553.2354145152983
Iteration: 5, Func. Count: 39, Neg. LLF: -553.2357343036192
Iteration: 6, Func. Count: 45, Neg. LLF: -553.2364050667455
Iteration: 7, Func. Count: 50, Neg. LLF: -553.2364062632001
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.2364062631975
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -553.627428727888
Iteration: 2, Func. Count: 16, Neg. LLF: -553.7580942813489
Iteration: 3, Func. Count: 25, Neg. LLF: -553.7594332749229
Iteration: 4, Func. Count: 32, Neg. LLF: -553.8663254240123
Iteration: 5, Func. Count: 38, Neg. LLF: -553.8895082304173
Iteration: 6, Func. Count: 46, Neg. LLF: -553.8895304791079
Iteration: 7, Func. Count: 51, Neg. LLF: -553.8908734385313
Iteration: 8, Func. Count: 56, Neg. LLF: -553.8913634625821
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.8913634626579
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -553.5262558932301
Iteration: 2, Func. Count: 16, Neg. LLF: -553.6405857074055
Iteration: 3, Func. Count: 24, Neg. LLF: -553.6438859025354
Iteration: 4, Func. Count: 31, Neg. LLF: -553.7743971681853
Iteration: 5, Func. Count: 38, Neg. LLF: -553.7787306152992
Iteration: 6, Func. Count: 45, Neg. LLF: -553.778928055781
Iteration: 7, Func. Count: 50, Neg. LLF: -553.7789316378269
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.7789316378175
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -553.2421188307528
Iteration: 2, Func. Count: 16, Neg. LLF: -553.3518737850129
Iteration: 3, Func. Count: 24, Neg. LLF: -553.3574752331963
Iteration: 4, Func. Count: 30, Neg. LLF: -553.4586889266195
Iteration: 5, Func. Count: 37, Neg. LLF: -553.4961214058003
Iteration: 6, Func. Count: 44, Neg. LLF: -553.4980216908675
Iteration: 7, Func. Count: 49, Neg. LLF: -553.4980962165969
Iteration: 8, Func. Count: 54, Neg. LLF: -553.4981106792126
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.4981106792154
Iterations: 8
Function evaluations: 54
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -552.9714912197596
Iteration: 2, Func. Count: 16, Neg. LLF: -553.0700743368641
Iteration: 3, Func. Count: 24, Neg. LLF: -553.0768574047081
Iteration: 4, Func. Count: 30, Neg. LLF: -553.2032795242202
Iteration: 5, Func. Count: 37, Neg. LLF: -553.2168388217657
Iteration: 6, Func. Count: 45, Neg. LLF: -553.2168904267253
Iteration: 7, Func. Count: 50, Neg. LLF: -553.2169630384578
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.2169630385126
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -552.5939682387686
Iteration: 2, Func. Count: 16, Neg. LLF: -552.7145040415462
Iteration: 3, Func. Count: 23, Neg. LLF: -552.7413490104645
Iteration: 4, Func. Count: 30, Neg. LLF: -552.7647801214558
Iteration: 5, Func. Count: 37, Neg. LLF: -552.8370581850634
Iteration: 6, Func. Count: 45, Neg. LLF: -552.8370775053793
Iteration: 7, Func. Count: 51, Neg. LLF: -552.8371838760781
Iteration: 8, Func. Count: 56, Neg. LLF: -552.8371880308864
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.8371880308875
Iterations: 8
Function evaluations: 56
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -552.4517519114654
Iteration: 2, Func. Count: 16, Neg. LLF: -552.5770012644165
Iteration: 3, Func. Count: 27, Neg. LLF: -552.6214037582818
Iteration: 4, Func. Count: 34, Neg. LLF: -552.6659425146164
Iteration: 5, Func. Count: 42, Neg. LLF: -552.6660067253047
Iteration: 6, Func. Count: 49, Neg. LLF: -552.6738510542759
Iteration: 7, Func. Count: 56, Neg. LLF: -552.6740191729809
Iteration: 8, Func. Count: 63, Neg. LLF: -552.6860890471752
Iteration: 9, Func. Count: 68, Neg. LLF: -552.6860968565929
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.6860968565817
Iterations: 10
Function evaluations: 68
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -552.5880664940192
Iteration: 2, Func. Count: 16, Neg. LLF: -552.7175003250716
Iteration: 3, Func. Count: 23, Neg. LLF: -552.7526990259531
Iteration: 4, Func. Count: 30, Neg. LLF: -552.7618665614921
Iteration: 5, Func. Count: 37, Neg. LLF: -552.8425356781793
Iteration: 6, Func. Count: 45, Neg. LLF: -552.8425394962617
Iteration: 7, Func. Count: 52, Neg. LLF: -552.8427026745687
Optimization terminated successfully. (Exit mode 0)
Current function value: -552.8427026746178
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -553.7078941794086
Iteration: 2, Func. Count: 17, Neg. LLF: -553.9451614670879
Iteration: 3, Func. Count: 24, Neg. LLF: -553.9464157557642
Iteration: 4, Func. Count: 31, Neg. LLF: -553.9466653302842
Iteration: 5, Func. Count: 39, Neg. LLF: -553.9467092214356
Iteration: 6, Func. Count: 44, Neg. LLF: -553.9470147497996
Optimization terminated successfully. (Exit mode 0)
Current function value: -553.9470147498357
Iterations: 6
Function evaluations: 44
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -555.1250970322659
Iteration: 2, Func. Count: 16, Neg. LLF: -555.194099761557
Iteration: 3, Func. Count: 25, Neg. LLF: -555.1947645212939
Iteration: 4, Func. Count: 31, Neg. LLF: -555.255735453926
Iteration: 5, Func. Count: 38, Neg. LLF: -555.2736114138963
Iteration: 6, Func. Count: 44, Neg. LLF: -555.2768105203338
Iteration: 7, Func. Count: 49, Neg. LLF: -555.2786393039618
Iteration: 8, Func. Count: 54, Neg. LLF: -555.2786469381861
Optimization terminated successfully. (Exit mode 0)
Current function value: -555.2786469381726
Iterations: 8
Function evaluations: 54
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -555.339754177468
Iteration: 2, Func. Count: 16, Neg. LLF: -555.3574601305995
Iteration: 3, Func. Count: 24, Neg. LLF: -555.364380836462
Iteration: 4, Func. Count: 31, Neg. LLF: -555.3741929905175
Iteration: 5, Func. Count: 39, Neg. LLF: -555.3906553516013
Iteration: 6, Func. Count: 46, Neg. LLF: -555.3907470694721
Iteration: 7, Func. Count: 51, Neg. LLF: -555.3908165652763
Optimization terminated successfully. (Exit mode 0)
Current function value: -555.3908165652911
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -560.6493247161106
Iteration: 2, Func. Count: 16, Neg. LLF: -560.6503804059578
Iteration: 3, Func. Count: 23, Neg. LLF: -560.6552423701789
Iteration: 4, Func. Count: 31, Neg. LLF: -560.6556888047519
Iteration: 5, Func. Count: 38, Neg. LLF: -560.6559243815454
Iteration: 6, Func. Count: 45, Neg. LLF: -560.6565622556607
Iteration: 7, Func. Count: 50, Neg. LLF: -560.6566283908272
Optimization terminated successfully. (Exit mode 0)
Current function value: -560.6566283908467
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -562.0292652617685
Iteration: 2, Func. Count: 16, Neg. LLF: -562.0330064302896
Iteration: 3, Func. Count: 25, Neg. LLF: -562.0339631387897
Iteration: 4, Func. Count: 32, Neg. LLF: -562.0370447110154
Iteration: 5, Func. Count: 40, Neg. LLF: -562.0409386064205
Iteration: 6, Func. Count: 45, Neg. LLF: -562.0409422003604
Optimization terminated successfully. (Exit mode 0)
Current function value: -562.0409422002965
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -566.0739208825611
Iteration: 2, Func. Count: 16, Neg. LLF: -566.074123920928
Iteration: 3, Func. Count: 23, Neg. LLF: -566.0764375248177
Iteration: 4, Func. Count: 31, Neg. LLF: -566.0769975350939
Iteration: 5, Func. Count: 38, Neg. LLF: -566.0770518102438
Iteration: 6, Func. Count: 45, Neg. LLF: -566.0772431331823
Iteration: 7, Func. Count: 50, Neg. LLF: -566.0772537430078
Optimization terminated successfully. (Exit mode 0)
Current function value: -566.077253743013
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -568.3900886176484
Iteration: 2, Func. Count: 16, Neg. LLF: -568.3903611337234
Iteration: 3, Func. Count: 26, Neg. LLF: -568.3903724232282
Iteration: 4, Func. Count: 33, Neg. LLF: -568.3904347081985
Iteration: 5, Func. Count: 41, Neg. LLF: -568.3908203543458
Optimization terminated successfully. (Exit mode 0)
Current function value: -568.3908203544111
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -567.9529102685294
Iteration: 2, Func. Count: 16, Neg. LLF: -567.9534254816731
Iteration: 3, Func. Count: 26, Neg. LLF: -567.9534287032336
Iteration: 4, Func. Count: 34, Neg. LLF: -567.9534513620015
Iteration: 5, Func. Count: 40, Neg. LLF: -567.9539080468342
Iteration: 6, Func. Count: 45, Neg. LLF: -567.954228684378
Optimization terminated successfully. (Exit mode 0)
Current function value: -567.954228684381
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -568.7652137704943
Iteration: 2, Func. Count: 17, Neg. LLF: -568.7652969018693
Iteration: 3, Func. Count: 25, Neg. LLF: -568.7659749160908
Iteration: 4, Func. Count: 31, Neg. LLF: -568.7707450423557
Iteration: 5, Func. Count: 39, Neg. LLF: -568.771253654954
Iteration: 6, Func. Count: 44, Neg. LLF: -568.7718395402928
Iteration: 7, Func. Count: 49, Neg. LLF: -568.7718425899382
Optimization terminated successfully. (Exit mode 0)
Current function value: -568.7718425899427
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -572.3945847735416
Iteration: 2, Func. Count: 17, Neg. LLF: -572.3948631640816
Iteration: 3, Func. Count: 26, Neg. LLF: -572.3951990955607
Iteration: 4, Func. Count: 32, Neg. LLF: -572.3963147367542
Iteration: 5, Func. Count: 37, Neg. LLF: -572.3969715851968
Optimization terminated successfully. (Exit mode 0)
Current function value: -572.3969719151245
Iterations: 5
Function evaluations: 39
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -573.9968914832291
Iteration: 2, Func. Count: 17, Neg. LLF: -573.9972312542449
Iteration: 3, Func. Count: 26, Neg. LLF: -573.997380816354
Iteration: 4, Func. Count: 33, Neg. LLF: -573.9977262614043
Optimization terminated successfully. (Exit mode 0)
Current function value: -573.9977262614441
Iterations: 4
Function evaluations: 33
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -574.1325391829134
Iteration: 2, Func. Count: 17, Neg. LLF: -574.1336568408798
Iteration: 3, Func. Count: 24, Neg. LLF: -574.1428643906933
Iteration: 4, Func. Count: 33, Neg. LLF: -574.1429965089662
Iteration: 5, Func. Count: 41, Neg. LLF: -574.1430083508426
Optimization terminated successfully. (Exit mode 0)
Current function value: -574.1430083508499
Iterations: 5
Function evaluations: 41
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -574.2784480207293
Iteration: 2, Func. Count: 17, Neg. LLF: -574.2785907256884
Iteration: 3, Func. Count: 25, Neg. LLF: -574.2792578930498
Iteration: 4, Func. Count: 31, Neg. LLF: -574.287843049806
Iteration: 5, Func. Count: 39, Neg. LLF: -574.2882296520645
Iteration: 6, Func. Count: 44, Neg. LLF: -574.2886960806386
Iteration: 7, Func. Count: 49, Neg. LLF: -574.2887011123305
Optimization terminated successfully. (Exit mode 0)
Current function value: -574.2887011123391
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -576.9254896119078
Iteration: 2, Func. Count: 16, Neg. LLF: -576.9254986970445
Iteration: 3, Func. Count: 24, Neg. LLF: -576.9301133712761
Iteration: 4, Func. Count: 30, Neg. LLF: -576.9911394064079
Iteration: 5, Func. Count: 38, Neg. LLF: -576.9912865664121
Iteration: 6, Func. Count: 43, Neg. LLF: -576.9927620341589
Iteration: 7, Func. Count: 48, Neg. LLF: -576.9928173970361
Optimization terminated successfully. (Exit mode 0)
Current function value: -576.9928173970861
Iterations: 7
Function evaluations: 48
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -577.9876696317094
Iteration: 2, Func. Count: 17, Neg. LLF: -577.9877431349395
Iteration: 3, Func. Count: 25, Neg. LLF: -577.9913130612165
Iteration: 4, Func. Count: 31, Neg. LLF: -578.0384834156539
Iteration: 5, Func. Count: 38, Neg. LLF: -578.0385285545874
Iteration: 6, Func. Count: 43, Neg. LLF: -578.0398630501306
Iteration: 7, Func. Count: 48, Neg. LLF: -578.0399943367503
Optimization terminated successfully. (Exit mode 0)
Current function value: -578.0399943368257
Iterations: 7
Function evaluations: 48
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -578.0497380913237
Iteration: 2, Func. Count: 17, Neg. LLF: -578.0497845255837
Iteration: 3, Func. Count: 25, Neg. LLF: -578.0532345904976
Iteration: 4, Func. Count: 31, Neg. LLF: -578.0974950543116
Iteration: 5, Func. Count: 38, Neg. LLF: -578.0975425333725
Iteration: 6, Func. Count: 43, Neg. LLF: -578.0987831309319
Iteration: 7, Func. Count: 48, Neg. LLF: -578.0989436718681
Optimization terminated successfully. (Exit mode 0)
Current function value: -578.098943671949
Iterations: 7
Function evaluations: 48
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -578.1078207204096
Iteration: 2, Func. Count: 17, Neg. LLF: -578.1079149098296
Iteration: 3, Func. Count: 25, Neg. LLF: -578.1112501994485
Iteration: 4, Func. Count: 31, Neg. LLF: -578.1541655587695
Iteration: 5, Func. Count: 38, Neg. LLF: -578.154233982174
Iteration: 6, Func. Count: 44, Neg. LLF: -578.1559100935744
Iteration: 7, Func. Count: 49, Neg. LLF: -578.1560478728845
Iteration: 8, Func. Count: 54, Neg. LLF: -578.1560915639827
Optimization terminated successfully. (Exit mode 0)
Current function value: -578.1560915639909
Iterations: 8
Function evaluations: 54
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -578.2664423618485
Iteration: 2, Func. Count: 16, Neg. LLF: -578.2664519206655
Iteration: 3, Func. Count: 24, Neg. LLF: -578.2682776602151
Iteration: 4, Func. Count: 30, Neg. LLF: -578.2900192745179
Iteration: 5, Func. Count: 38, Neg. LLF: -578.2902008545852
Iteration: 6, Func. Count: 44, Neg. LLF: -578.2922215640957
Iteration: 7, Func. Count: 49, Neg. LLF: -578.2931402319962
Iteration: 8, Func. Count: 54, Neg. LLF: -578.2931479001312
Optimization terminated successfully. (Exit mode 0)
Current function value: -578.2931479001588
Iterations: 8
Function evaluations: 54
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -579.0379425393028
Iteration: 2, Func. Count: 17, Neg. LLF: -579.0379676294015
Iteration: 3, Func. Count: 25, Neg. LLF: -579.0392323094434
Iteration: 4, Func. Count: 31, Neg. LLF: -579.0561787790768
Iteration: 5, Func. Count: 39, Neg. LLF: -579.0563256775099
Iteration: 6, Func. Count: 44, Neg. LLF: -579.0570859115983
Iteration: 7, Func. Count: 49, Neg. LLF: -579.0570954152554
Optimization terminated successfully. (Exit mode 0)
Current function value: -579.05709541527
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -579.3566791714285
Iteration: 2, Func. Count: 17, Neg. LLF: -579.3567003441586
Iteration: 3, Func. Count: 25, Neg. LLF: -579.3576873551974
Iteration: 4, Func. Count: 31, Neg. LLF: -579.3715491568172
Iteration: 5, Func. Count: 39, Neg. LLF: -579.3717665260756
Iteration: 6, Func. Count: 44, Neg. LLF: -579.372425706044
Iteration: 7, Func. Count: 49, Neg. LLF: -579.3724343277777
Optimization terminated successfully. (Exit mode 0)
Current function value: -579.3724343277906
Iterations: 7
Function evaluations: 49
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -579.4437790603386
Iteration: 2, Func. Count: 17, Neg. LLF: -579.4438816085833
Iteration: 3, Func. Count: 26, Neg. LLF: -579.4440266020804
Iteration: 4, Func. Count: 34, Neg. LLF: -579.4440425627105
Iteration: 5, Func. Count: 39, Neg. LLF: -579.4440449152476
Optimization terminated successfully. (Exit mode 0)
Current function value: -579.4440449152476
Iterations: 5
Function evaluations: 39
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -581.0459507464253
Iteration: 2, Func. Count: 17, Neg. LLF: -581.0462175119932
Iteration: 3, Func. Count: 26, Neg. LLF: -581.0462361842717
Iteration: 4, Func. Count: 33, Neg. LLF: -581.0474586487192
Iteration: 5, Func. Count: 39, Neg. LLF: -581.0475571962708
Iteration: 6, Func. Count: 45, Neg. LLF: -581.0478873824029
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.0478873823873
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -581.2223287430749
Iteration: 2, Func. Count: 17, Neg. LLF: -581.2231969379504
Iteration: 3, Func. Count: 24, Neg. LLF: -581.2367170468747
Iteration: 4, Func. Count: 32, Neg. LLF: -581.2369759517235
Iteration: 5, Func. Count: 40, Neg. LLF: -581.2369894037712
Iteration: 6, Func. Count: 46, Neg. LLF: -581.2370289950843
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.2370289950572
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -582.2838436281427
Iteration: 2, Func. Count: 17, Neg. LLF: -582.2848117227252
Iteration: 3, Func. Count: 24, Neg. LLF: -582.3098498484253
Iteration: 4, Func. Count: 32, Neg. LLF: -582.3100144199269
Iteration: 5, Func. Count: 39, Neg. LLF: -582.3101783738073
Optimization terminated successfully. (Exit mode 0)
Current function value: -582.3101783738678
Iterations: 5
Function evaluations: 39
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -581.7049576424506
Iteration: 2, Func. Count: 17, Neg. LLF: -581.7059439279517
Iteration: 3, Func. Count: 24, Neg. LLF: -581.7218632710111
Iteration: 4, Func. Count: 32, Neg. LLF: -581.7222452931296
Iteration: 5, Func. Count: 40, Neg. LLF: -581.7222611902512
Iteration: 6, Func. Count: 46, Neg. LLF: -581.7222781561782
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.7222781561685
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -581.3793177612218
Iteration: 2, Func. Count: 17, Neg. LLF: -581.3804722689958
Iteration: 3, Func. Count: 24, Neg. LLF: -581.3894129399717
Iteration: 4, Func. Count: 32, Neg. LLF: -581.3897081235738
Iteration: 5, Func. Count: 40, Neg. LLF: -581.3897184367518
Iteration: 6, Func. Count: 46, Neg. LLF: -581.3897491691724
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.3897491691424
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -582.0097838054807
Iteration: 2, Func. Count: 17, Neg. LLF: -582.0141480431903
Iteration: 3, Func. Count: 24, Neg. LLF: -582.0379016515551
Iteration: 4, Func. Count: 31, Neg. LLF: -582.0403863515542
Iteration: 5, Func. Count: 39, Neg. LLF: -582.0404105298048
Iteration: 6, Func. Count: 46, Neg. LLF: -582.0407926924355
Optimization terminated successfully. (Exit mode 0)
Current function value: -582.0407926925102
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -585.9076627227857
Iteration: 2, Func. Count: 17, Neg. LLF: -585.9108427396623
Iteration: 3, Func. Count: 24, Neg. LLF: -585.9321220628811
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -585.932122071961
Iterations: 7
Function evaluations: 24
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -586.3196509663383
Iteration: 2, Func. Count: 17, Neg. LLF: -586.321331406593
Iteration: 3, Func. Count: 25, Neg. LLF: -586.3224876986962
Iteration: 4, Func. Count: 32, Neg. LLF: -586.3237768202289
Iteration: 5, Func. Count: 40, Neg. LLF: -586.3243901426897
Iteration: 6, Func. Count: 45, Neg. LLF: -586.324478251466
Optimization terminated successfully. (Exit mode 0)
Current function value: -586.3244782513943
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -588.3291085782876
Iteration: 2, Func. Count: 17, Neg. LLF: -588.3314722974909
Iteration: 3, Func. Count: 26, Neg. LLF: -588.3320137550666
Iteration: 4, Func. Count: 33, Neg. LLF: -588.337567755484
Iteration: 5, Func. Count: 40, Neg. LLF: -588.3375716869289
Iteration: 6, Func. Count: 45, Neg. LLF: -588.3376041018698
Optimization terminated successfully. (Exit mode 0)
Current function value: -588.3376043205958
Iterations: 6
Function evaluations: 47
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -588.3625296468563
Iteration: 2, Func. Count: 17, Neg. LLF: -588.3638399360857
Iteration: 3, Func. Count: 24, Neg. LLF: -588.3902330643882
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 4, Func. Count: 35, Neg. LLF: -588.3902408363215
Iteration: 5, Func. Count: 43, Neg. LLF: -588.3937749591356
Iteration: 6, Func. Count: 50, Neg. LLF: -588.3945197406586
Iteration: 7, Func. Count: 58, Neg. LLF: -588.394543902841
Iteration: 8, Func. Count: 63, Neg. LLF: -588.3947326308275
Optimization terminated successfully. (Exit mode 0)
Current function value: -588.3947326307939
Iterations: 9
Function evaluations: 63
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -589.2992103249123
Iteration: 2, Func. Count: 17, Neg. LLF: -589.3085167466986
Iteration: 3, Func. Count: 26, Neg. LLF: -589.3090757240286
Iteration: 4, Func. Count: 33, Neg. LLF: -589.3178951028758
Iteration: 5, Func. Count: 40, Neg. LLF: -589.3179022568604
Iteration: 6, Func. Count: 45, Neg. LLF: -589.317944479456
Optimization terminated successfully. (Exit mode 0)
Current function value: -589.3179444794531
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -591.3394202554487
Iteration: 2, Func. Count: 17, Neg. LLF: -591.3482109769384
Iteration: 3, Func. Count: 24, Neg. LLF: -591.3669947155266
Iteration: 4, Func. Count: 32, Neg. LLF: -591.3702368281836
Iteration: 5, Func. Count: 40, Neg. LLF: -591.3703049410346
Optimization terminated successfully. (Exit mode 0)
Current function value: -591.3703052588742
Iterations: 5
Function evaluations: 43
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -590.364875730503
Iteration: 2, Func. Count: 17, Neg. LLF: -590.3737415740932
Iteration: 3, Func. Count: 26, Neg. LLF: -590.3742653221723
Iteration: 4, Func. Count: 33, Neg. LLF: -590.3877075521277
Iteration: 5, Func. Count: 41, Neg. LLF: -590.3877256529527
Iteration: 6, Func. Count: 46, Neg. LLF: -590.387775395923
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.3877753959514
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -584.5194708353102
Iteration: 2, Func. Count: 17, Neg. LLF: -584.5210631231653
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -584.5210630784852
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -583.814672016887
Iteration: 2, Func. Count: 17, Neg. LLF: -583.8314811167404
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -583.8314810861654
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -582.4367227582544
Iteration: 2, Func. Count: 17, Neg. LLF: -582.4426687553465
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -582.442668722407
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -581.233068788575
Iteration: 2, Func. Count: 17, Neg. LLF: -581.2334313251895
Iteration: 3, Func. Count: 24, Neg. LLF: -581.2375226983606
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.2375227994746
Iterations: 4
Function evaluations: 32
Gradient evaluations: 3
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -580.6027227790146
Iteration: 2, Func. Count: 17, Neg. LLF: -580.6032531268243
Iteration: 3, Func. Count: 25, Neg. LLF: -580.6121841036997
Iteration: 4, Func. Count: 32, Neg. LLF: -580.6384432521866
Iteration: 5, Func. Count: 41, Neg. LLF: -580.6384497621764
Iteration: 6, Func. Count: 46, Neg. LLF: -580.6384586006141
Optimization terminated successfully. (Exit mode 0)
Current function value: -580.6384586006302
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -580.7447378128622
Iteration: 2, Func. Count: 17, Neg. LLF: -580.747359582245
Iteration: 3, Func. Count: 25, Neg. LLF: -580.7516685445394
Iteration: 4, Func. Count: 32, Neg. LLF: -580.7815902515163
Optimization terminated successfully. (Exit mode 0)
Current function value: -580.7815907360068
Iterations: 4
Function evaluations: 36
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -581.8859890398026
Iteration: 2, Func. Count: 17, Neg. LLF: -581.8877835137934
Iteration: 3, Func. Count: 25, Neg. LLF: -581.8904119643025
Iteration: 4, Func. Count: 32, Neg. LLF: -581.9304412243039
Iteration: 5, Func. Count: 41, Neg. LLF: -581.9304439999548
Iteration: 6, Func. Count: 46, Neg. LLF: -581.9304486205203
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.9304486205347
Iterations: 6
Function evaluations: 46
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -581.7173926352607
Iteration: 2, Func. Count: 17, Neg. LLF: -581.7207488378033
Iteration: 3, Func. Count: 25, Neg. LLF: -581.7244124246777
Iteration: 4, Func. Count: 32, Neg. LLF: -581.7548737027178
Iteration: 5, Func. Count: 40, Neg. LLF: -581.7548797487798
Iteration: 6, Func. Count: 45, Neg. LLF: -581.7548890485928
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.7548890486116
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -581.6424789715916
Iteration: 2, Func. Count: 17, Neg. LLF: -581.6445298873417
Iteration: 3, Func. Count: 25, Neg. LLF: -581.6484251091739
Iteration: 4, Func. Count: 32, Neg. LLF: -581.6710229542783
Iteration: 5, Func. Count: 40, Neg. LLF: -581.6710466473569
Iteration: 6, Func. Count: 45, Neg. LLF: -581.6710660020638
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.6710660020874
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -580.1920647099689
Iteration: 2, Func. Count: 17, Neg. LLF: -580.1926263039411
Iteration: 3, Func. Count: 25, Neg. LLF: -580.2075351222575
Iteration: 4, Func. Count: 32, Neg. LLF: -580.2137459780272
Iteration: 5, Func. Count: 40, Neg. LLF: -580.2139652882424
Iteration: 6, Func. Count: 46, Neg. LLF: -580.2141300562578
Iteration: 7, Func. Count: 51, Neg. LLF: -580.2141370108775
Optimization terminated successfully. (Exit mode 0)
Current function value: -580.2141370108761
Iterations: 7
Function evaluations: 51
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -580.3558539418278
Iteration: 2, Func. Count: 17, Neg. LLF: -580.3599339087866
Optimization terminated successfully. (Exit mode 0)
Current function value: -580.3599343982435
Iterations: 2
Function evaluations: 22
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -581.6166840583262
Iteration: 2, Func. Count: 17, Neg. LLF: -581.6172043613609
Iteration: 3, Func. Count: 26, Neg. LLF: -581.6173067172526
Iteration: 4, Func. Count: 34, Neg. LLF: -581.6289822830148
Iteration: 5, Func. Count: 41, Neg. LLF: -581.6332688850273
Iteration: 6, Func. Count: 49, Neg. LLF: -581.6334168071253
Iteration: 7, Func. Count: 54, Neg. LLF: -581.6334340596945
Optimization terminated successfully. (Exit mode 0)
Current function value: -581.6334340597026
Iterations: 8
Function evaluations: 54
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -582.347837128378
Iteration: 2, Func. Count: 17, Neg. LLF: -582.348065249369
Iteration: 3, Func. Count: 25, Neg. LLF: -582.3570729976374
Iteration: 4, Func. Count: 32, Neg. LLF: -582.3628884681777
Iteration: 5, Func. Count: 40, Neg. LLF: -582.3629292093678
Iteration: 6, Func. Count: 45, Neg. LLF: -582.3629466352829
Optimization terminated successfully. (Exit mode 0)
Current function value: -582.3629466352943
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -582.207602176841
Iteration: 2, Func. Count: 17, Neg. LLF: -582.2076551843
Iteration: 3, Func. Count: 26, Neg. LLF: -582.2082175533042
Iteration: 4, Func. Count: 34, Neg. LLF: -582.217565741782
Iteration: 5, Func. Count: 43, Neg. LLF: -582.2178591167486
Iteration: 6, Func. Count: 50, Neg. LLF: -582.2227754676908
Iteration: 7, Func. Count: 58, Neg. LLF: -582.2228580740191
Iteration: 8, Func. Count: 63, Neg. LLF: -582.2228924038765
Optimization terminated successfully. (Exit mode 0)
Current function value: -582.2228924038839
Iterations: 9
Function evaluations: 63
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -582.376463537738
Iteration: 2, Func. Count: 17, Neg. LLF: -582.376919800413
Iteration: 3, Func. Count: 25, Neg. LLF: -582.3827669910548
Iteration: 4, Func. Count: 32, Neg. LLF: -582.397025422987
Iteration: 5, Func. Count: 40, Neg. LLF: -582.397035557774
Iteration: 6, Func. Count: 45, Neg. LLF: -582.3970552055929
Optimization terminated successfully. (Exit mode 0)
Current function value: -582.3970552056121
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -583.221027240163
Iteration: 2, Func. Count: 17, Neg. LLF: -583.2216796189477
Iteration: 3, Func. Count: 25, Neg. LLF: -583.2244249281407
Iteration: 4, Func. Count: 32, Neg. LLF: -583.2498125810921
Iteration: 5, Func. Count: 40, Neg. LLF: -583.2499468714672
Iteration: 6, Func. Count: 45, Neg. LLF: -583.2499661148782
Optimization terminated successfully. (Exit mode 0)
Current function value: -583.2499661148997
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -583.2362065801266
Iteration: 2, Func. Count: 17, Neg. LLF: -583.2363819057209
Iteration: 3, Func. Count: 25, Neg. LLF: -583.2389332436536
Iteration: 4, Func. Count: 32, Neg. LLF: -583.2663973095409
Iteration: 5, Func. Count: 40, Neg. LLF: -583.2664414222345
Iteration: 6, Func. Count: 45, Neg. LLF: -583.2664569875403
Optimization terminated successfully. (Exit mode 0)
Current function value: -583.2664569875626
Iterations: 6
Function evaluations: 45
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -583.4639891600642
Iteration: 2, Func. Count: 17, Neg. LLF: -583.4656242518245
Iteration: 3, Func. Count: 26, Neg. LLF: -583.4664255562371
Iteration: 4, Func. Count: 32, Neg. LLF: -583.4926939080003
Iteration: 5, Func. Count: 40, Neg. LLF: -583.49328735959
Iteration: 6, Func. Count: 45, Neg. LLF: -583.5158081766303
Iteration: 7, Func. Count: 50, Neg. LLF: -583.5158512160943
Optimization terminated successfully. (Exit mode 0)
Current function value: -583.5158512161202
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -584.28019297371
Iteration: 2, Func. Count: 17, Neg. LLF: -584.2818672490936
Iteration: 3, Func. Count: 26, Neg. LLF: -584.2835126122247
Iteration: 4, Func. Count: 38, Neg. LLF: -584.2855088399543
Iteration: 5, Func. Count: 46, Neg. LLF: -584.327866590574
Iteration: 6, Func. Count: 55, Neg. LLF: -584.3353599182759
Iteration: 7, Func. Count: 62, Neg. LLF: -584.336245252432
Iteration: 8, Func. Count: 70, Neg. LLF: -584.3365256750326
Iteration: 9, Func. Count: 75, Neg. LLF: -584.3365269394851
Optimization terminated successfully. (Exit mode 0)
Current function value: -584.3365269394824
Iterations: 11
Function evaluations: 75
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -584.1797543928792
Iteration: 2, Func. Count: 17, Neg. LLF: -584.180871144714
Iteration: 3, Func. Count: 26, Neg. LLF: -584.1811393376951
Iteration: 4, Func. Count: 32, Neg. LLF: -584.2182064282734
Iteration: 5, Func. Count: 40, Neg. LLF: -584.2188996784698
Iteration: 6, Func. Count: 45, Neg. LLF: -584.2378166057064
Iteration: 7, Func. Count: 50, Neg. LLF: -584.2378479923665
Optimization terminated successfully. (Exit mode 0)
Current function value: -584.2378479923777
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -584.1582628757537
Iteration: 2, Func. Count: 17, Neg. LLF: -584.1587833654664
Iteration: 3, Func. Count: 26, Neg. LLF: -584.1591551427812
Iteration: 4, Func. Count: 32, Neg. LLF: -584.1879263855058
Iteration: 5, Func. Count: 40, Neg. LLF: -584.1885581703054
Iteration: 6, Func. Count: 45, Neg. LLF: -584.2107316581221
Iteration: 7, Func. Count: 50, Neg. LLF: -584.2107730214705
Optimization terminated successfully. (Exit mode 0)
Current function value: -584.21077302149
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -584.1426782313695
Iteration: 2, Func. Count: 17, Neg. LLF: -584.1433693084093
Iteration: 3, Func. Count: 26, Neg. LLF: -584.143799465591
Iteration: 4, Func. Count: 32, Neg. LLF: -584.1663531350207
Iteration: 5, Func. Count: 40, Neg. LLF: -584.1669961651717
Iteration: 6, Func. Count: 45, Neg. LLF: -584.1915621009282
Iteration: 7, Func. Count: 50, Neg. LLF: -584.1917018998051
Iteration: 8, Func. Count: 55, Neg. LLF: -584.1917029039699
Optimization terminated successfully. (Exit mode 0)
Current function value: -584.1917029039707
Iterations: 8
Function evaluations: 55
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -584.3828562600963
Iteration: 2, Func. Count: 17, Neg. LLF: -584.3829945325276
Iteration: 3, Func. Count: 27, Neg. LLF: -584.3831296050182
Iteration: 4, Func. Count: 39, Neg. LLF: -584.3833213407847
Iteration: 5, Func. Count: 47, Neg. LLF: -584.4251619289313
Optimization terminated successfully. (Exit mode 0)
Current function value: -584.4251626970081
Iterations: 7
Function evaluations: 52
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -585.0524912378826
Iteration: 2, Func. Count: 17, Neg. LLF: -585.0528434702167
Iteration: 3, Func. Count: 25, Neg. LLF: -585.081863298755
Iteration: 4, Func. Count: 34, Neg. LLF: -585.0900643103498
Iteration: 5, Func. Count: 41, Neg. LLF: -585.0911092542381
Iteration: 6, Func. Count: 49, Neg. LLF: -585.0913168254955
Iteration: 7, Func. Count: 54, Neg. LLF: -585.0913216902322
Optimization terminated successfully. (Exit mode 0)
Current function value: -585.0913216902031
Iterations: 8
Function evaluations: 54
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -585.1109173955326
Iteration: 2, Func. Count: 17, Neg. LLF: -585.1109507978531
Iteration: 3, Func. Count: 28, Neg. LLF: -585.1109846623103
Iteration: 4, Func. Count: 40, Neg. LLF: -585.1110053416464
Iteration: 5, Func. Count: 48, Neg. LLF: -585.1454146645576
Iteration: 6, Func. Count: 56, Neg. LLF: -585.1459215400214
Iteration: 7, Func. Count: 63, Neg. LLF: -585.1474452738336
Iteration: 8, Func. Count: 70, Neg. LLF: -585.1475076763104
Iteration: 9, Func. Count: 75, Neg. LLF: -585.1477895703138
Optimization terminated successfully. (Exit mode 0)
Current function value: -585.1477895702703
Iterations: 11
Function evaluations: 75
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -585.7327486971457
Optimization terminated successfully. (Exit mode 0)
Current function value: -585.7327490343607
Iterations: 1
Function evaluations: 13
Gradient evaluations: 1
Iteration: 1, Func. Count: 5, Neg. LLF: -586.5823691159939
Iteration: 2, Func. Count: 17, Neg. LLF: -586.5849441485061
Iteration: 3, Func. Count: 25, Neg. LLF: -586.5852056721317
Iteration: 4, Func. Count: 37, Neg. LLF: -586.585210076664
Optimization terminated successfully. (Exit mode 0)
Current function value: -586.5852100410524
Iterations: 5
Function evaluations: 48
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -587.279636209161
Iteration: 2, Func. Count: 17, Neg. LLF: -587.2901538356068
Iteration: 3, Func. Count: 25, Neg. LLF: -587.3110522943332
Iteration: 4, Func. Count: 32, Neg. LLF: -587.3202512812788
Iteration: 5, Func. Count: 39, Neg. LLF: -587.3264209437582
Iteration: 6, Func. Count: 45, Neg. LLF: -587.3264280564447
Optimization terminated successfully. (Exit mode 0)
Current function value: -587.3264284870048
Iterations: 6
Function evaluations: 48
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -588.7965654738091
Iteration: 2, Func. Count: 17, Neg. LLF: -588.803565996611
Iteration: 3, Func. Count: 25, Neg. LLF: -588.8037540333387
Iteration: 4, Func. Count: 37, Neg. LLF: -588.8038226068703
Iteration: 5, Func. Count: 45, Neg. LLF: -588.8215139435547
Iteration: 6, Func. Count: 52, Neg. LLF: -588.8373823724555
Iteration: 7, Func. Count: 60, Neg. LLF: -588.8375424790881
Iteration: 8, Func. Count: 65, Neg. LLF: -588.8376643292224
Iteration: 9, Func. Count: 70, Neg. LLF: -588.8376956781997
Optimization terminated successfully. (Exit mode 0)
Current function value: -588.8376956782015
Iterations: 10
Function evaluations: 70
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -589.0404513217954
Iteration: 2, Func. Count: 17, Neg. LLF: -589.057713843094
Iteration: 3, Func. Count: 25, Neg. LLF: -589.0592057515098
Iteration: 4, Func. Count: 31, Neg. LLF: -589.148265741888
Iteration: 5, Func. Count: 40, Neg. LLF: -589.1482701260192
Iteration: 6, Func. Count: 45, Neg. LLF: -589.1496693918139
Iteration: 7, Func. Count: 50, Neg. LLF: -589.149929041745
Optimization terminated successfully. (Exit mode 0)
Current function value: -589.1499294203879
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -590.5907741620147
Iteration: 2, Func. Count: 17, Neg. LLF: -590.6011653447013
Iteration: 3, Func. Count: 26, Neg. LLF: -590.602464164218
Iteration: 4, Func. Count: 32, Neg. LLF: -590.6687143320944
Iteration: 5, Func. Count: 40, Neg. LLF: -590.6692487832124
Iteration: 6, Func. Count: 45, Neg. LLF: -590.6745004914587
Iteration: 7, Func. Count: 50, Neg. LLF: -590.6745354589009
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.6745354588757
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -590.6208390869251
Iteration: 2, Func. Count: 17, Neg. LLF: -590.6336325845173
Iteration: 3, Func. Count: 26, Neg. LLF: -590.6346432432154
Iteration: 4, Func. Count: 32, Neg. LLF: -590.7056312714724
Iteration: 5, Func. Count: 40, Neg. LLF: -590.7062032475044
Iteration: 6, Func. Count: 45, Neg. LLF: -590.7104978799014
Iteration: 7, Func. Count: 50, Neg. LLF: -590.710531923165
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.7105319231422
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -590.6437578861869
Iteration: 2, Func. Count: 17, Neg. LLF: -590.6560467755593
Iteration: 3, Func. Count: 26, Neg. LLF: -590.6570232081248
Iteration: 4, Func. Count: 32, Neg. LLF: -590.730173022311
Iteration: 5, Func. Count: 40, Neg. LLF: -590.7307477874597
Iteration: 6, Func. Count: 45, Neg. LLF: -590.7345620420574
Iteration: 7, Func. Count: 50, Neg. LLF: -590.734591349534
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.734591349514
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -590.4727340187651
Iteration: 2, Func. Count: 17, Neg. LLF: -590.4864235179402
Iteration: 3, Func. Count: 26, Neg. LLF: -590.4880902999959
Iteration: 4, Func. Count: 32, Neg. LLF: -590.5512995696365
Iteration: 5, Func. Count: 40, Neg. LLF: -590.551808727657
Iteration: 6, Func. Count: 45, Neg. LLF: -590.5575774678807
Iteration: 7, Func. Count: 50, Neg. LLF: -590.557606606561
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.5576066065379
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -590.4081665448239
Iteration: 2, Func. Count: 17, Neg. LLF: -590.4193943801336
Iteration: 3, Func. Count: 25, Neg. LLF: -590.4203522528442
Iteration: 4, Func. Count: 31, Neg. LLF: -590.4727857432364
Iteration: 5, Func. Count: 39, Neg. LLF: -590.472917880224
Iteration: 6, Func. Count: 45, Neg. LLF: -590.481468263434
Iteration: 7, Func. Count: 50, Neg. LLF: -590.4814765167197
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.4814765167173
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -590.3373263705436
Iteration: 2, Func. Count: 17, Neg. LLF: -590.3493954660976
Iteration: 3, Func. Count: 25, Neg. LLF: -590.3509712010703
Iteration: 4, Func. Count: 31, Neg. LLF: -590.398072548311
Iteration: 5, Func. Count: 39, Neg. LLF: -590.3981582918719
Iteration: 6, Func. Count: 45, Neg. LLF: -590.408613451398
Iteration: 7, Func. Count: 50, Neg. LLF: -590.4087227002649
Iteration: 8, Func. Count: 55, Neg. LLF: -590.4087398292277
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.4087398292296
Iterations: 8
Function evaluations: 55
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -590.1473430428821
Iteration: 2, Func. Count: 17, Neg. LLF: -590.1559415061192
Iteration: 3, Func. Count: 25, Neg. LLF: -590.1579122553654
Iteration: 4, Func. Count: 31, Neg. LLF: -590.2050103537858
Iteration: 5, Func. Count: 39, Neg. LLF: -590.2050527974459
Iteration: 6, Func. Count: 45, Neg. LLF: -590.2150820785248
Iteration: 7, Func. Count: 50, Neg. LLF: -590.2150916922634
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.2150916922653
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -590.4352038030405
Iteration: 2, Func. Count: 17, Neg. LLF: -590.4559332040733
Iteration: 3, Func. Count: 26, Neg. LLF: -590.4564672778231
Iteration: 4, Func. Count: 32, Neg. LLF: -590.524273992845
Iteration: 5, Func. Count: 40, Neg. LLF: -590.5248387570857
Iteration: 6, Func. Count: 45, Neg. LLF: -590.529862852957
Iteration: 7, Func. Count: 50, Neg. LLF: -590.529893003636
Optimization terminated successfully. (Exit mode 0)
Current function value: -590.5298930036133
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -591.1372073223883
Iteration: 2, Func. Count: 17, Neg. LLF: -591.1509294921888
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -591.1509294647142
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -592.0014237539554
Iteration: 2, Func. Count: 17, Neg. LLF: -592.0264998219138
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -592.0264997929435
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -593.0291003244613
Iteration: 2, Func. Count: 17, Neg. LLF: -593.0836311972586
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -593.0836311678279
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -595.2218177137199
Iteration: 2, Func. Count: 17, Neg. LLF: -595.2533855456054
Iteration: 3, Func. Count: 26, Neg. LLF: -595.2539271005218
Iteration: 4, Func. Count: 32, Neg. LLF: -595.3098569594888
Iteration: 5, Func. Count: 40, Neg. LLF: -595.3103472296044
Iteration: 6, Func. Count: 45, Neg. LLF: -595.3185890073356
Iteration: 7, Func. Count: 50, Neg. LLF: -595.3186378372461
Optimization terminated successfully. (Exit mode 0)
Current function value: -595.3186378372086
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -595.370378791959
Iteration: 2, Func. Count: 17, Neg. LLF: -595.4021335801035
Iteration: 3, Func. Count: 26, Neg. LLF: -595.4024920762179
Iteration: 4, Func. Count: 32, Neg. LLF: -595.4691890805602
Iteration: 5, Func. Count: 40, Neg. LLF: -595.4697317280752
Iteration: 6, Func. Count: 45, Neg. LLF: -595.4753144887309
Iteration: 7, Func. Count: 50, Neg. LLF: -595.4753527727
Optimization terminated successfully. (Exit mode 0)
Current function value: -595.4753527726718
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -595.7708783083212
Iteration: 2, Func. Count: 17, Neg. LLF: -595.8086477539505
Iteration: 3, Func. Count: 24, Neg. LLF: -595.8492493541204
Iteration: 4, Func. Count: 32, Neg. LLF: -595.8691738708205
Iteration: 5, Func. Count: 39, Neg. LLF: -595.882296913226
Iteration: 6, Func. Count: 46, Neg. LLF: -595.8857943762231
Iteration: 7, Func. Count: 54, Neg. LLF: -595.8858207812507
Iteration: 8, Func. Count: 60, Neg. LLF: -595.8858598770292
Optimization terminated successfully. (Exit mode 0)
Current function value: -595.8858598770543
Iterations: 8
Function evaluations: 60
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -596.1898432097753
Iteration: 2, Func. Count: 17, Neg. LLF: -596.2352069605188
Iteration: 3, Func. Count: 24, Neg. LLF: -596.2813891386435
Iteration: 4, Func. Count: 36, Neg. LLF: -596.2814003995009
Iteration: 5, Func. Count: 45, Neg. LLF: -596.281407403033
Optimization terminated successfully. (Exit mode 0)
Current function value: -596.2814074701782
Iterations: 7
Function evaluations: 53
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -598.023324522434
Iteration: 2, Func. Count: 17, Neg. LLF: -598.0363496533942
Iteration: 3, Func. Count: 25, Neg. LLF: -598.0373260175629
Iteration: 4, Func. Count: 31, Neg. LLF: -598.1373323975163
Iteration: 5, Func. Count: 39, Neg. LLF: -598.1373427219103
Iteration: 6, Func. Count: 45, Neg. LLF: -598.1422220579673
Iteration: 7, Func. Count: 50, Neg. LLF: -598.1422323626782
Optimization terminated successfully. (Exit mode 0)
Current function value: -598.1422323626729
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -600.2416068015855
Iteration: 2, Func. Count: 17, Neg. LLF: -600.266956794343
Iteration: 3, Func. Count: 25, Neg. LLF: -600.274590796407
Iteration: 4, Func. Count: 32, Neg. LLF: -600.2882536642883
Iteration: 5, Func. Count: 39, Neg. LLF: -600.3271856477045
Iteration: 6, Func. Count: 47, Neg. LLF: -600.3273966058109
Iteration: 7, Func. Count: 52, Neg. LLF: -600.3273987398857
Optimization terminated successfully. (Exit mode 0)
Current function value: -600.3273987398852
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -600.7008628658878
Iteration: 2, Func. Count: 17, Neg. LLF: -600.7307347242466
Iteration: 3, Func. Count: 26, Neg. LLF: -600.73245838978
Iteration: 4, Func. Count: 32, Neg. LLF: -600.8065234806722
Iteration: 5, Func. Count: 40, Neg. LLF: -600.8070693324524
Iteration: 6, Func. Count: 45, Neg. LLF: -600.8132631547339
Iteration: 7, Func. Count: 50, Neg. LLF: -600.8133145114027
Optimization terminated successfully. (Exit mode 0)
Current function value: -600.8133145113661
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -601.6507491991626
Iteration: 2, Func. Count: 17, Neg. LLF: -601.6695779486431
Iteration: 3, Func. Count: 26, Neg. LLF: -601.6699234164474
Iteration: 4, Func. Count: 32, Neg. LLF: -601.7502382362529
Iteration: 5, Func. Count: 40, Neg. LLF: -601.7509228475554
Iteration: 6, Func. Count: 45, Neg. LLF: -601.7578459298074
Iteration: 7, Func. Count: 50, Neg. LLF: -601.7579157224791
Optimization terminated successfully. (Exit mode 0)
Current function value: -601.7579157224363
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -602.7256075815685
Iteration: 2, Func. Count: 17, Neg. LLF: -602.7311790881093
Iteration: 3, Func. Count: 25, Neg. LLF: -602.7334833883323
Iteration: 4, Func. Count: 31, Neg. LLF: -602.805926160079
Iteration: 5, Func. Count: 39, Neg. LLF: -602.8060394721274
Iteration: 6, Func. Count: 44, Neg. LLF: -602.8106561779357
Iteration: 7, Func. Count: 49, Neg. LLF: -602.8114400588809
Optimization terminated successfully. (Exit mode 0)
Current function value: -602.811440942421
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -603.8412093486165
Iteration: 2, Func. Count: 17, Neg. LLF: -603.868161576412
Iteration: 3, Func. Count: 24, Neg. LLF: -603.875833801405
Iteration: 4, Func. Count: 33, Neg. LLF: -603.8762361589224
Iteration: 5, Func. Count: 41, Neg. LLF: -603.8954930282493
Iteration: 6, Func. Count: 53, Neg. LLF: -603.8980136235186
Iteration: 7, Func. Count: 62, Neg. LLF: -603.8985773953639
Iteration: 8, Func. Count: 68, Neg. LLF: -603.9254191889377
Iteration: 9, Func. Count: 75, Neg. LLF: -603.9255114220704
Iteration: 10, Func. Count: 80, Neg. LLF: -603.9361011783874
Iteration: 11, Func. Count: 85, Neg. LLF: -603.936492555566
Iteration: 12, Func. Count: 90, Neg. LLF: -603.9364937007164
Optimization terminated successfully. (Exit mode 0)
Current function value: -603.9364937007182
Iterations: 13
Function evaluations: 90
Gradient evaluations: 12
Iteration: 1, Func. Count: 5, Neg. LLF: -605.8444344729246
Iteration: 2, Func. Count: 17, Neg. LLF: -605.8885108736739
Iteration: 3, Func. Count: 27, Neg. LLF: -605.8885124680264
Iteration: 4, Func. Count: 39, Neg. LLF: -605.8885151557674
Iteration: 5, Func. Count: 47, Neg. LLF: -605.9053977898366
Iteration: 6, Func. Count: 55, Neg. LLF: -605.9083563049617
Iteration: 7, Func. Count: 62, Neg. LLF: -605.9381471339195
Iteration: 8, Func. Count: 71, Neg. LLF: -605.9381553437704
Iteration: 9, Func. Count: 76, Neg. LLF: -605.9382677829409
Optimization terminated successfully. (Exit mode 0)
Current function value: -605.9382677829589
Iterations: 10
Function evaluations: 76
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -606.4312135694452
Iteration: 2, Func. Count: 17, Neg. LLF: -606.4707506291436
Iteration: 3, Func. Count: 25, Neg. LLF: -606.470948026554
Iteration: 4, Func. Count: 37, Neg. LLF: -606.4712544662214
Iteration: 5, Func. Count: 45, Neg. LLF: -606.4714837861595
Iteration: 6, Func. Count: 53, Neg. LLF: -606.478834769653
Iteration: 7, Func. Count: 61, Neg. LLF: -606.4833102454022
Iteration: 8, Func. Count: 67, Neg. LLF: -606.5123175113551
Iteration: 9, Func. Count: 74, Neg. LLF: -606.5138474772734
Iteration: 10, Func. Count: 80, Neg. LLF: -606.5250126536699
Iteration: 11, Func. Count: 85, Neg. LLF: -606.5252874776345
Optimization terminated successfully. (Exit mode 0)
Current function value: -606.5252874776542
Iterations: 12
Function evaluations: 85
Gradient evaluations: 11
Iteration: 1, Func. Count: 5, Neg. LLF: -601.5730143458637
Iteration: 2, Func. Count: 17, Neg. LLF: -601.5808967350013
Iteration: 3, Func. Count: 25, Neg. LLF: -601.5856421146706
Iteration: 4, Func. Count: 31, Neg. LLF: -601.6857045251056
Iteration: 5, Func. Count: 39, Neg. LLF: -601.6866011220663
Iteration: 6, Func. Count: 45, Neg. LLF: -601.6919493797634
Iteration: 7, Func. Count: 50, Neg. LLF: -601.6919676120579
Optimization terminated successfully. (Exit mode 0)
Current function value: -601.6919676120406
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -601.4127994018285
Iteration: 2, Func. Count: 17, Neg. LLF: -601.433553560217
Iteration: 3, Func. Count: 26, Neg. LLF: -601.4345982024483
Iteration: 4, Func. Count: 32, Neg. LLF: -601.5042737545605
Iteration: 5, Func. Count: 40, Neg. LLF: -601.5049418014755
Iteration: 6, Func. Count: 45, Neg. LLF: -601.5131105295052
Iteration: 7, Func. Count: 50, Neg. LLF: -601.513162962357
Optimization terminated successfully. (Exit mode 0)
Current function value: -601.5131629624091
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -600.1887836402
Iteration: 2, Func. Count: 17, Neg. LLF: -600.1952434974316
Iteration: 3, Func. Count: 25, Neg. LLF: -600.2423524463009
Iteration: 4, Func. Count: 34, Neg. LLF: -600.2757815085422
Iteration: 5, Func. Count: 42, Neg. LLF: -600.2838229343398
Iteration: 6, Func. Count: 49, Neg. LLF: -600.2848505677364
Iteration: 7, Func. Count: 56, Neg. LLF: -600.2850017574281
Iteration: 8, Func. Count: 61, Neg. LLF: -600.2850121944095
Optimization terminated successfully. (Exit mode 0)
Current function value: -600.2850121944141
Iterations: 8
Function evaluations: 61
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -600.2926510409269
Iteration: 2, Func. Count: 17, Neg. LLF: -600.3168254580198
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -600.316825433902
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -600.1851594913699
Iteration: 2, Func. Count: 17, Neg. LLF: -600.1986969294773
Iteration: 3, Func. Count: 24, Neg. LLF: -600.2353199350781
Iteration: 4, Func. Count: 35, Neg. LLF: -600.2353274730683
Optimization terminated successfully. (Exit mode 0)
Current function value: -600.2353283233422
Iterations: 5
Function evaluations: 40
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -600.5003290942038
Iteration: 2, Func. Count: 17, Neg. LLF: -600.5113023917962
Iteration: 3, Func. Count: 24, Neg. LLF: -600.549141438019
Iteration: 4, Func. Count: 36, Neg. LLF: -600.5491546831192
Iteration: 5, Func. Count: 44, Neg. LLF: -600.5631640558547
Iteration: 6, Func. Count: 51, Neg. LLF: -600.5774697690051
Iteration: 7, Func. Count: 58, Neg. LLF: -600.5800270950026
Iteration: 8, Func. Count: 64, Neg. LLF: -600.5806385116954
Iteration: 9, Func. Count: 69, Neg. LLF: -600.5810530920205
Optimization terminated successfully. (Exit mode 0)
Current function value: -600.5810534750542
Iterations: 10
Function evaluations: 71
Gradient evaluations: 9
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -601.416632001706
Iteration: 2, Func. Count: 17, Neg. LLF: -601.4405609933917
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -601.4405609745118
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -602.6835901695233
Iteration: 2, Func. Count: 17, Neg. LLF: -602.7229663264936
Iteration: 3, Func. Count: 24, Neg. LLF: -602.7587122292248
Iteration: 4, Func. Count: 36, Neg. LLF: -602.7587400033558
Iteration: 5, Func. Count: 44, Neg. LLF: -602.7587991199116
Iteration: 6, Func. Count: 56, Neg. LLF: -602.7588072006338
Iteration: 7, Func. Count: 64, Neg. LLF: -602.7730197495312
Iteration: 8, Func. Count: 71, Neg. LLF: -602.788111231532
Iteration: 9, Func. Count: 79, Neg. LLF: -602.788318222321
Iteration: 10, Func. Count: 85, Neg. LLF: -602.788664013058
Iteration: 11, Func. Count: 90, Neg. LLF: -602.7886719849946
Optimization terminated successfully. (Exit mode 0)
Current function value: -602.7886719850051
Iterations: 13
Function evaluations: 90
Gradient evaluations: 11
Iteration: 1, Func. Count: 5, Neg. LLF: -602.54750561983
Iteration: 2, Func. Count: 17, Neg. LLF: -602.5608062148053
Iteration: 3, Func. Count: 25, Neg. LLF: -602.6136582858336
Iteration: 4, Func. Count: 32, Neg. LLF: -602.6195792097475
Iteration: 5, Func. Count: 40, Neg. LLF: -602.62028805443
Iteration: 6, Func. Count: 45, Neg. LLF: -602.620330334212
Iteration: 7, Func. Count: 50, Neg. LLF: -602.6203314149857
Optimization terminated successfully. (Exit mode 0)
Current function value: -602.6203314149805
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -603.3178435934495
Iteration: 2, Func. Count: 17, Neg. LLF: -603.3770236855412
Iteration: 3, Func. Count: 25, Neg. LLF: -603.4340475320714
Iteration: 4, Func. Count: 33, Neg. LLF: -603.4353126321364
Iteration: 5, Func. Count: 40, Neg. LLF: -603.4356330458451
Iteration: 6, Func. Count: 48, Neg. LLF: -603.4370926881484
Iteration: 7, Func. Count: 53, Neg. LLF: -603.4372022783319
Optimization terminated successfully. (Exit mode 0)
Current function value: -603.4372023436497
Iterations: 7
Function evaluations: 56
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -605.7889370383186
Iteration: 2, Func. Count: 17, Neg. LLF: -605.9170236838092
Iteration: 3, Func. Count: 24, Neg. LLF: -605.9296807110718
Iteration: 4, Func. Count: 31, Neg. LLF: -605.9354907170128
Iteration: 5, Func. Count: 39, Neg. LLF: -605.935792060129
Iteration: 6, Func. Count: 47, Neg. LLF: -605.9358170677042
Optimization terminated successfully. (Exit mode 0)
Current function value: -605.9358170677368
Iterations: 6
Function evaluations: 47
Gradient evaluations: 6
Iteration: 1, Func. Count: 5, Neg. LLF: -605.976974615635
Iteration: 2, Func. Count: 17, Neg. LLF: -606.1175552305638
Iteration: 3, Func. Count: 28, Neg. LLF: -606.1175654501803
Iteration: 4, Func. Count: 36, Neg. LLF: -606.1292851784901
Iteration: 5, Func. Count: 43, Neg. LLF: -606.1370985864882
Iteration: 6, Func. Count: 51, Neg. LLF: -606.1372796208016
Iteration: 7, Func. Count: 57, Neg. LLF: -606.137753333921
Iteration: 8, Func. Count: 62, Neg. LLF: -606.1377660569266
Optimization terminated successfully. (Exit mode 0)
Current function value: -606.1377660569386
Iterations: 9
Function evaluations: 62
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -605.8279337163742
Iteration: 2, Func. Count: 17, Neg. LLF: -606.0104095226909
Iteration: 3, Func. Count: 25, Neg. LLF: -606.0173936981624
Iteration: 4, Func. Count: 32, Neg. LLF: -606.0287248918416
Iteration: 5, Func. Count: 40, Neg. LLF: -606.0288287146446
Iteration: 6, Func. Count: 45, Neg. LLF: -606.0289196953709
Iteration: 7, Func. Count: 50, Neg. LLF: -606.0289553375809
Optimization terminated successfully. (Exit mode 0)
Current function value: -606.0289553375717
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -608.0650643942972
Iteration: 2, Func. Count: 17, Neg. LLF: -608.1038963986338
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -608.1038963920751
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -610.046710015832
Iteration: 2, Func. Count: 17, Neg. LLF: -610.1311367361291
Iteration: 3, Func. Count: 28, Neg. LLF: -610.1311634627996
Iteration: 4, Func. Count: 40, Neg. LLF: -610.1311898279438
Iteration: 5, Func. Count: 48, Neg. LLF: -610.1313409198735
Iteration: 6, Func. Count: 59, Neg. LLF: -610.1313681043948
Iteration: 7, Func. Count: 67, Neg. LLF: -610.141391549743
Iteration: 8, Func. Count: 74, Neg. LLF: -610.1418913643126
Iteration: 9, Func. Count: 82, Neg. LLF: -610.1423085788442
Iteration: 10, Func. Count: 88, Neg. LLF: -610.142574746908
Iteration: 11, Func. Count: 93, Neg. LLF: -610.1425956052639
Iteration: 12, Func. Count: 98, Neg. LLF: -610.1425977812305
Optimization terminated successfully. (Exit mode 0)
Current function value: -610.1425977812303
Iterations: 15
Function evaluations: 98
Gradient evaluations: 12
Iteration: 1, Func. Count: 5, Neg. LLF: -610.6895427159969
Iteration: 2, Func. Count: 17, Neg. LLF: -610.7433191723158
Iteration: 3, Func. Count: 27, Neg. LLF: -610.743674621277
Iteration: 4, Func. Count: 36, Neg. LLF: -610.750644005301
Iteration: 5, Func. Count: 45, Neg. LLF: -610.7626222134536
Iteration: 6, Func. Count: 53, Neg. LLF: -610.7730208119551
Iteration: 7, Func. Count: 61, Neg. LLF: -610.7732575091925
Iteration: 8, Func. Count: 68, Neg. LLF: -610.7738337387731
Iteration: 9, Func. Count: 75, Neg. LLF: -610.7739357057876
Iteration: 10, Func. Count: 80, Neg. LLF: -610.7739792439254
Optimization terminated successfully. (Exit mode 0)
Current function value: -610.7739792439181
Iterations: 11
Function evaluations: 80
Gradient evaluations: 10
Iteration: 1, Func. Count: 5, Neg. LLF: -613.5968116463292
Iteration: 2, Func. Count: 17, Neg. LLF: -613.6778194452781
Iteration: 3, Func. Count: 24, Neg. LLF: -613.684143715805
Iteration: 4, Func. Count: 32, Neg. LLF: -613.6842157707177
Iteration: 5, Func. Count: 40, Neg. LLF: -613.6842267227273
Iteration: 6, Func. Count: 45, Neg. LLF: -613.6842355055073
Iteration: 7, Func. Count: 50, Neg. LLF: -613.684238501849
Optimization terminated successfully. (Exit mode 0)
Current function value: -613.684238501849
Iterations: 7
Function evaluations: 50
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -614.2075052760024
Iteration: 2, Func. Count: 17, Neg. LLF: -614.2591432876859
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -614.2591432866368
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -615.2727251546045
Iteration: 2, Func. Count: 17, Neg. LLF: -615.3494199599108
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -615.3494199328121
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -617.1716915120267
Iteration: 2, Func. Count: 17, Neg. LLF: -617.2316960729258
Iteration: 3, Func. Count: 24, Neg. LLF: -617.2382650710667
Iteration: 4, Func. Count: 36, Neg. LLF: -617.2382683829601
Iteration: 5, Func. Count: 44, Neg. LLF: -617.2391035081997
Iteration: 6, Func. Count: 51, Neg. LLF: -617.2397444843613
Iteration: 7, Func. Count: 59, Neg. LLF: -617.2402542717095
Iteration: 8, Func. Count: 64, Neg. LLF: -617.240353707271
Optimization terminated successfully. (Exit mode 0)
Current function value: -617.2403537072072
Iterations: 9
Function evaluations: 64
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -619.0511564573239
Iteration: 2, Func. Count: 17, Neg. LLF: -619.0872827412315
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -619.0872827371422
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -620.3426656123836
Iteration: 2, Func. Count: 17, Neg. LLF: -620.4054450751073
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -620.4054450773532
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -619.8306775748156
Iteration: 2, Func. Count: 17, Neg. LLF: -619.8837892868178
Iteration: 3, Func. Count: 25, Neg. LLF: -619.8841605001817
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -619.8841604755527
Iterations: 7
Function evaluations: 25
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -620.4980457044692
Iteration: 2, Func. Count: 17, Neg. LLF: -620.5749776244415
Iteration: 3, Func. Count: 24, Neg. LLF: -620.5803643512418
Iteration: 4, Func. Count: 36, Neg. LLF: -620.5803725468893
Iteration: 5, Func. Count: 45, Neg. LLF: -620.5811630182047
Iteration: 6, Func. Count: 52, Neg. LLF: -620.581738940366
Iteration: 7, Func. Count: 57, Neg. LLF: -620.5818761290132
Iteration: 8, Func. Count: 63, Neg. LLF: -620.5819304805872
Iteration: 9, Func. Count: 68, Neg. LLF: -620.5819389082449
Optimization terminated successfully. (Exit mode 0)
Current function value: -620.5819389082658
Iterations: 10
Function evaluations: 68
Gradient evaluations: 9
Iteration: 1, Func. Count: 5, Neg. LLF: -623.0688146579627
Iteration: 2, Func. Count: 17, Neg. LLF: -623.116559373877
Iteration: 3, Func. Count: 24, Neg. LLF: -623.1215837449461
Iteration: 4, Func. Count: 31, Neg. LLF: -623.1258628742472
Iteration: 5, Func. Count: 39, Neg. LLF: -623.1270878312168
Iteration: 6, Func. Count: 47, Neg. LLF: -623.1271103909546
Iteration: 7, Func. Count: 52, Neg. LLF: -623.1271334282128
Optimization terminated successfully. (Exit mode 0)
Current function value: -623.1271342114496
Iterations: 7
Function evaluations: 53
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -627.0125057055754
Iteration: 2, Func. Count: 17, Neg. LLF: -627.032109408096
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -627.0321094311146
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -629.0680838602498
Iteration: 2, Func. Count: 17, Neg. LLF: -629.0691956440855
Iteration: 3, Func. Count: 25, Neg. LLF: -629.0877123709606
Iteration: 4, Func. Count: 34, Neg. LLF: -629.0910328967559
Iteration: 5, Func. Count: 41, Neg. LLF: -629.103471433813
Iteration: 6, Func. Count: 49, Neg. LLF: -629.1035693510805
Iteration: 7, Func. Count: 54, Neg. LLF: -629.1035744040937
Optimization terminated successfully. (Exit mode 0)
Current function value: -629.1035744040705
Iterations: 7
Function evaluations: 54
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -631.3510714324575
Iteration: 2, Func. Count: 17, Neg. LLF: -631.3720742739858
Iteration: 3, Func. Count: 24, Neg. LLF: -631.3855347649502
Iteration: 4, Func. Count: 32, Neg. LLF: -631.3879093547216
Iteration: 5, Func. Count: 39, Neg. LLF: -631.3888376215843
Iteration: 6, Func. Count: 47, Neg. LLF: -631.3889599732358
Iteration: 7, Func. Count: 53, Neg. LLF: -631.3893557034219
Iteration: 8, Func. Count: 58, Neg. LLF: -631.389358089727
Optimization terminated successfully. (Exit mode 0)
Current function value: -631.389358250292
Iterations: 8
Function evaluations: 60
Gradient evaluations: 8
Iteration: 1, Func. Count: 5, Neg. LLF: -631.4803602679428
Iteration: 2, Func. Count: 17, Neg. LLF: -631.4970874435846
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -631.4970874420294
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -634.3748110202539
Iteration: 2, Func. Count: 17, Neg. LLF: -634.3782095132709
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -634.3782094926582
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -636.7994615638936
Iteration: 2, Func. Count: 17, Neg. LLF: -636.8044049421646
Iteration: 3, Func. Count: 24, Neg. LLF: -636.8088132220439
Iteration: 4, Func. Count: 32, Neg. LLF: -636.8089484443628
Iteration: 5, Func. Count: 39, Neg. LLF: -636.8116373021787
Iteration: 6, Func. Count: 47, Neg. LLF: -636.811644397237
Iteration: 7, Func. Count: 52, Neg. LLF: -636.8116569517861
Optimization terminated successfully. (Exit mode 0)
Current function value: -636.8116569516759
Iterations: 7
Function evaluations: 52
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -638.5677323025056
Iteration: 2, Func. Count: 17, Neg. LLF: -638.5816064685434
Iteration: 3, Func. Count: 24, Neg. LLF: -638.5821988516706
Iteration: 4, Func. Count: 31, Neg. LLF: -638.5826846495938
Optimization terminated successfully. (Exit mode 0)
Current function value: -638.5826846879975
Iterations: 4
Function evaluations: 38
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -638.294231296808
Iteration: 2, Func. Count: 17, Neg. LLF: -638.3045491417627
Iteration: 3, Func. Count: 25, Neg. LLF: -638.3046688888692
Iteration: 4, Func. Count: 33, Neg. LLF: -638.3050334436416
Optimization terminated successfully. (Exit mode 0)
Current function value: -638.3050334434604
Iterations: 4
Function evaluations: 44
Gradient evaluations: 4
Iteration: 1, Func. Count: 5, Neg. LLF: -639.693834623977
Iteration: 2, Func. Count: 17, Neg. LLF: -639.6938424701516
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -639.6938424698172
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -640.480080734946
Iteration: 2, Func. Count: 17, Neg. LLF: -640.5053390549131
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -640.5053390315817
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -642.5658432732579
Iteration: 2, Func. Count: 17, Neg. LLF: -642.5658497539399
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -642.5658497423126
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -644.0844340704798
Iteration: 2, Func. Count: 17, Neg. LLF: -644.0995548131893
Iteration: 3, Func. Count: 27, Neg. LLF: -644.0996550548036
Iteration: 4, Func. Count: 35, Neg. LLF: -644.1237333588982
Iteration: 5, Func. Count: 42, Neg. LLF: -644.1258345175358
Iteration: 6, Func. Count: 50, Neg. LLF: -644.1258369281272
Iteration: 7, Func. Count: 55, Neg. LLF: -644.1258486557357
Optimization terminated successfully. (Exit mode 0)
Current function value: -644.1258486557238
Iterations: 8
Function evaluations: 55
Gradient evaluations: 7
Iteration: 1, Func. Count: 5, Neg. LLF: -645.647757752295
Iteration: 2, Func. Count: 17, Neg. LLF: -645.6557808716944
Optimization terminated successfully. (Exit mode 0)
Current function value: -645.6557817404832
Iterations: 3
Function evaluations: 24
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -645.2846172889019
Iteration: 2, Func. Count: 17, Neg. LLF: -645.2957010475702
Optimization terminated successfully. (Exit mode 0)
Current function value: -645.2957017989463
Iterations: 3
Function evaluations: 24
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -646.1782413756127
Iteration: 2, Func. Count: 17, Neg. LLF: -646.1903577093565
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -646.1903577057128
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -647.4938638541962
Iteration: 2, Func. Count: 17, Neg. LLF: -647.498349047951
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -647.4983490286836
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -642.821059914249
Iteration: 2, Func. Count: 17, Neg. LLF: -642.8307335016195
Iteration: 3, Func. Count: 26, Neg. LLF: -642.8337992319202
Iteration: 4, Func. Count: 38, Neg. LLF: -642.836352874035
Iteration: 5, Func. Count: 47, Neg. LLF: -642.8403831400396
Iteration: 6, Func. Count: 56, Neg. LLF: -642.8466604648868
Iteration: 7, Func. Count: 65, Neg. LLF: -642.8482239956119
Iteration: 8, Func. Count: 73, Neg. LLF: -642.8485066004184
Optimization terminated successfully. (Exit mode 0)
Current function value: -642.8485066003894
Iterations: 9
Function evaluations: 84
Gradient evaluations: 8
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -642.6335994315144
Iteration: 2, Func. Count: 17, Neg. LLF: -642.6418876251796
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -642.6418876186349
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -642.8521870250383
Iteration: 2, Func. Count: 17, Neg. LLF: -642.8599308729622
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -642.8599308686639
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -642.9438354269907
Iteration: 2, Func. Count: 17, Neg. LLF: -642.9543714651497
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -642.9543714613312
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -643.5062734867406
Iteration: 2, Func. Count: 17, Neg. LLF: -643.5075756098362
Iteration: 3, Func. Count: 26, Neg. LLF: -643.5110665369855
Iteration: 4, Func. Count: 38, Neg. LLF: -643.5137921235648
Iteration: 5, Func. Count: 46, Neg. LLF: -643.5229246554335
Iteration: 6, Func. Count: 56, Neg. LLF: -643.5231956642763
Iteration: 7, Func. Count: 63, Neg. LLF: -643.5258698330863
Iteration: 8, Func. Count: 71, Neg. LLF: -643.5259877783883
Iteration: 9, Func. Count: 76, Neg. LLF: -643.5259921185145
Optimization terminated successfully. (Exit mode 0)
Current function value: -643.5259921185038
Iterations: 10
Function evaluations: 76
Gradient evaluations: 9
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -649.0926433968732
Iteration: 2, Func. Count: 17, Neg. LLF: -649.0930812570989
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -649.0930812677888
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -648.5758566527763
Iteration: 2, Func. Count: 17, Neg. LLF: -648.5763935630093
Iteration: 3, Func. Count: 26, Neg. LLF: -648.5765555875469
Iteration: 4, Func. Count: 34, Neg. LLF: -648.5807497380806
Iteration: 5, Func. Count: 43, Neg. LLF: -648.582741620269
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -648.5827416268519
Iterations: 9
Function evaluations: 43
Gradient evaluations: 5
Iteration: 1, Func. Count: 5, Neg. LLF: -648.506943642009
Iteration: 2, Func. Count: 17, Neg. LLF: -648.5070351213017
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -648.5070351380034
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -648.2930080251049
Iteration: 2, Func. Count: 17, Neg. LLF: -648.2932433465019
Iteration: 3, Func. Count: 26, Neg. LLF: -648.293502662933
Iteration: 4, Func. Count: 38, Neg. LLF: -648.2937536634871
Iteration: 5, Func. Count: 46, Neg. LLF: -648.2950224504334
Iteration: 6, Func. Count: 55, Neg. LLF: -648.2961829256258
Iteration: 7, Func. Count: 63, Neg. LLF: -648.2963649802084
Iteration: 8, Func. Count: 71, Neg. LLF: -648.2978894572278
Iteration: 9, Func. Count: 83, Neg. LLF: -648.2983885044782
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -648.2983885190224
Iterations: 13
Function evaluations: 83
Gradient evaluations: 9
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning) /Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
Iteration: 1, Func. Count: 5, Neg. LLF: -648.6378861311113
Iteration: 2, Func. Count: 17, Neg. LLF: -648.6386211859137
Iteration: 3, Func. Count: 28, Neg. LLF: -648.6386250411597
Optimization terminated successfully. (Exit mode 0)
Current function value: -648.6386253064709
Iterations: 5
Function evaluations: 37
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -649.7141755670041
Iteration: 2, Func. Count: 17, Neg. LLF: -649.714918029051
Positive directional derivative for linesearch (Exit mode 8)
Current function value: -649.7149180266252
Iterations: 6
Function evaluations: 17
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -649.7918599565039
Iteration: 2, Func. Count: 17, Neg. LLF: -649.793160769481
Iteration: 3, Func. Count: 28, Neg. LLF: -649.7931660521697
Optimization terminated successfully. (Exit mode 0)
Current function value: -649.7931664001846
Iterations: 4
Function evaluations: 35
Gradient evaluations: 3
Iteration: 1, Func. Count: 5, Neg. LLF: -650.9178794505091
Iteration: 2, Func. Count: 17, Neg. LLF: -650.9258817587421
Optimization terminated successfully. (Exit mode 0)
Current function value: -650.9258819404431
Iterations: 3
Function evaluations: 25
Gradient evaluations: 2
Iteration: 1, Func. Count: 5, Neg. LLF: -654.0237968531264
Iteration: 2, Func. Count: 17, Neg. LLF: -654.0392017101393
Optimization terminated successfully. (Exit mode 0)
Current function value: -654.039202101796
Iterations: 3
Function evaluations: 25
Gradient evaluations: 2
/Users/austincostello1/anaconda3/lib/python3.6/site-packages/arch/univariate/base.py:522: ConvergenceWarning: The optimizer returned code 8. The message is: Positive directional derivative for linesearch See scipy.optimize.fmin_slsqp for code meaning. ConvergenceWarning)
PRED_1 = np.reshape(PRED, (589,6))
Let's check how the model is performing:
PRED_1[:,0]
plt.plot(ACTUAL, label='Actual')
plt.plot(PRED_1[:,0], label='Predicted')
plt.title("SPY Variance vs Predicted Variance from ARCH(1)")
plt.legend()
plt.savefig("arch.png", bbox_inches='tight')
rolling_ar_errors = measure_error(ACTUAL, PRED_1[:,0], label='RMSE ARCH(1)' )
RMSE = 0.00015760018121505748 RMSE_pcent = 1.9817494383790915 label = RMSE ARCH(1)
## Calculating PNL
dir = []
for i in range(len(PRED_1)-1):
for j in range(len(PRED_1[0])):
if ACTUAL[i] < PRED_1[i+1][j]:
dir.append(1)
else:
dir.append(-1)
dir1=np.reshape(dir, (588, 6))
act_vix_lst = df_data.loc[df_data.Date > '2014-07-29',["VIX","VIXCM30","VIXCM60","VIXCM90","VIXCM120","VIXCM150"]]
act_vix_lst = act_vix_lst.values
diff = np.diff(act_vix_lst, axis=0)
money_accumulated = np.multiply(diff,dir1)
PnL = np.sum(money_accumulated)
print(PnL)
-58.35501325999999
| model | PnL | RMSE |
|---|---|---|
| ARCH(1) | -58.3550 | 0.000157 |
| GARCH(1,1) | -78.4006 | 0.000153 |
| GARCH(2,1) | -99.9630 | 0.000152 |
| GARCH(2,2) | -91.0460 | 0.000150 |
| GARCH(3,2) | -95.5904 | 0.000152 |
While our RMSEs look quite strong, none of these models seem to result in a profitable strategy, so we will move onto try VECM models.
First let's take a look at the plotted data:
df_data.VIX.plot(label='VIX')
df_data.VIXCM30.plot(label='VIXCM30')
plt.legend()
<matplotlib.legend.Legend at 0x1c1dc3e160>
The two variables look cointegrated, but let's run a Johansen Test to be sure:
d = {'vix': df_data['VIX'],
'vixcm30':df_data['VIXCM30']
}
df = pd.DataFrame(d)
c = coint_johansen(df,1,63)
c
-------------------------------------------------- --> Trace Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 39.5184 16.1619 18.3985 23.1485 r = 1 8.231 2.7055 3.8415 6.6349 -------------------------------------------------- --> Eigen Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 31.2874 15.0006 17.1481 21.7465 r = 1 8.231 2.7055 3.8415 6.6349 -------------------------------------------------- eigenvectors: [[ 0.77065652 -0.08729355] [-0.83501063 0.23909376]] -------------------------------------------------- eigenvalues: [0.0108086 0.0028549] --------------------------------------------------
<johansen_test.Holder at 0x1c1d273940>
The test confirms they are cointegrated so we can build the model:
train,test = train_test_split(df_data, test_size=0.2, shuffle=False)
H = train[['VIX', 'VIXCM30']].dropna()
v = VECM(H, exog=train['SPY'],deterministic='li', k_ar_diff=20)
f = v.fit()
f.summary()
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | -6.127e-05 | 0.000 | -0.151 | 0.880 | -0.001 | 0.001 |
| L1.VIX | -0.3103 | 0.042 | -7.325 | 0.000 | -0.393 | -0.227 |
| L1.VIXCM30 | 0.2802 | 0.088 | 3.169 | 0.002 | 0.107 | 0.453 |
| L2.VIX | -0.2080 | 0.043 | -4.863 | 0.000 | -0.292 | -0.124 |
| L2.VIXCM30 | 0.1658 | 0.089 | 1.872 | 0.061 | -0.008 | 0.339 |
| L3.VIX | -0.1166 | 0.043 | -2.718 | 0.007 | -0.201 | -0.033 |
| L3.VIXCM30 | 0.0563 | 0.089 | 0.635 | 0.525 | -0.117 | 0.230 |
| L4.VIX | -0.1284 | 0.043 | -2.989 | 0.003 | -0.213 | -0.044 |
| L4.VIXCM30 | 0.0175 | 0.089 | 0.197 | 0.843 | -0.156 | 0.191 |
| L5.VIX | 0.0449 | 0.043 | 1.045 | 0.296 | -0.039 | 0.129 |
| L5.VIXCM30 | -0.2715 | 0.089 | -3.066 | 0.002 | -0.445 | -0.098 |
| L6.VIX | -0.0783 | 0.043 | -1.825 | 0.068 | -0.162 | 0.006 |
| L6.VIXCM30 | 0.1179 | 0.089 | 1.330 | 0.183 | -0.056 | 0.292 |
| L7.VIX | -0.0661 | 0.043 | -1.543 | 0.123 | -0.150 | 0.018 |
| L7.VIXCM30 | -0.0227 | 0.089 | -0.256 | 0.798 | -0.196 | 0.151 |
| L8.VIX | 0.0640 | 0.043 | 1.495 | 0.135 | -0.020 | 0.148 |
| L8.VIXCM30 | -0.1825 | 0.089 | -2.062 | 0.039 | -0.356 | -0.009 |
| L9.VIX | 0.0832 | 0.043 | 1.944 | 0.052 | -0.001 | 0.167 |
| L9.VIXCM30 | -0.2264 | 0.089 | -2.555 | 0.011 | -0.400 | -0.053 |
| L10.VIX | 0.1476 | 0.043 | 3.450 | 0.001 | 0.064 | 0.231 |
| L10.VIXCM30 | -0.1111 | 0.089 | -1.252 | 0.211 | -0.285 | 0.063 |
| L11.VIX | 0.1314 | 0.043 | 3.065 | 0.002 | 0.047 | 0.215 |
| L11.VIXCM30 | -0.2368 | 0.089 | -2.670 | 0.008 | -0.411 | -0.063 |
| L12.VIX | -0.0393 | 0.043 | -0.918 | 0.359 | -0.123 | 0.045 |
| L12.VIXCM30 | 0.1105 | 0.089 | 1.245 | 0.213 | -0.063 | 0.284 |
| L13.VIX | -0.0114 | 0.043 | -0.267 | 0.789 | -0.095 | 0.072 |
| L13.VIXCM30 | -0.0027 | 0.089 | -0.030 | 0.976 | -0.177 | 0.171 |
| L14.VIX | 0.1181 | 0.042 | 2.787 | 0.005 | 0.035 | 0.201 |
| L14.VIXCM30 | -0.2756 | 0.089 | -3.112 | 0.002 | -0.449 | -0.102 |
| L15.VIX | 0.0855 | 0.042 | 2.023 | 0.043 | 0.003 | 0.168 |
| L15.VIXCM30 | -0.1531 | 0.089 | -1.730 | 0.084 | -0.327 | 0.020 |
| L16.VIX | 0.1011 | 0.042 | 2.393 | 0.017 | 0.018 | 0.184 |
| L16.VIXCM30 | -0.0786 | 0.088 | -0.888 | 0.375 | -0.252 | 0.095 |
| L17.VIX | 0.0745 | 0.042 | 1.772 | 0.076 | -0.008 | 0.157 |
| L17.VIXCM30 | -0.0325 | 0.088 | -0.368 | 0.713 | -0.206 | 0.141 |
| L18.VIX | 0.0057 | 0.042 | 0.136 | 0.892 | -0.076 | 0.087 |
| L18.VIXCM30 | -0.0607 | 0.088 | -0.689 | 0.491 | -0.233 | 0.112 |
| L19.VIX | 0.1579 | 0.041 | 3.845 | 0.000 | 0.077 | 0.238 |
| L19.VIXCM30 | -0.3513 | 0.088 | -4.007 | 0.000 | -0.523 | -0.179 |
| L20.VIX | 0.0813 | 0.040 | 2.047 | 0.041 | 0.003 | 0.159 |
| L20.VIXCM30 | -0.1109 | 0.087 | -1.278 | 0.201 | -0.281 | 0.059 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | 0.0005 | 0.000 | 2.424 | 0.015 | 8.68e-05 | 0.001 |
| L1.VIX | -0.0867 | 0.020 | -4.446 | 0.000 | -0.125 | -0.048 |
| L1.VIXCM30 | 0.0775 | 0.041 | 1.904 | 0.057 | -0.002 | 0.157 |
| L2.VIX | -0.0532 | 0.020 | -2.704 | 0.007 | -0.092 | -0.015 |
| L2.VIXCM30 | 0.0270 | 0.041 | 0.662 | 0.508 | -0.053 | 0.107 |
| L3.VIX | -0.0202 | 0.020 | -1.022 | 0.307 | -0.059 | 0.019 |
| L3.VIXCM30 | -0.0199 | 0.041 | -0.488 | 0.626 | -0.100 | 0.060 |
| L4.VIX | -0.0497 | 0.020 | -2.511 | 0.012 | -0.088 | -0.011 |
| L4.VIXCM30 | 0.0118 | 0.041 | 0.290 | 0.772 | -0.068 | 0.092 |
| L5.VIX | 0.0002 | 0.020 | 0.012 | 0.990 | -0.039 | 0.039 |
| L5.VIXCM30 | -0.1161 | 0.041 | -2.849 | 0.004 | -0.196 | -0.036 |
| L6.VIX | -0.0098 | 0.020 | -0.498 | 0.618 | -0.049 | 0.029 |
| L6.VIXCM30 | 0.0058 | 0.041 | 0.143 | 0.886 | -0.074 | 0.086 |
| L7.VIX | -0.0090 | 0.020 | -0.454 | 0.650 | -0.048 | 0.030 |
| L7.VIXCM30 | -0.0396 | 0.041 | -0.970 | 0.332 | -0.120 | 0.040 |
| L8.VIX | 0.0070 | 0.020 | 0.357 | 0.721 | -0.032 | 0.046 |
| L8.VIXCM30 | -0.0480 | 0.041 | -1.179 | 0.239 | -0.128 | 0.032 |
| L9.VIX | 0.0188 | 0.020 | 0.952 | 0.341 | -0.020 | 0.057 |
| L9.VIXCM30 | -0.0691 | 0.041 | -1.694 | 0.090 | -0.149 | 0.011 |
| L10.VIX | 0.0602 | 0.020 | 3.058 | 0.002 | 0.022 | 0.099 |
| L10.VIXCM30 | -0.0520 | 0.041 | -1.273 | 0.203 | -0.132 | 0.028 |
| L11.VIX | 0.0457 | 0.020 | 2.314 | 0.021 | 0.007 | 0.084 |
| L11.VIXCM30 | -0.0869 | 0.041 | -2.127 | 0.033 | -0.167 | -0.007 |
| L12.VIX | -0.0147 | 0.020 | -0.744 | 0.457 | -0.053 | 0.024 |
| L12.VIXCM30 | 0.0169 | 0.041 | 0.415 | 0.678 | -0.063 | 0.097 |
| L13.VIX | -0.0245 | 0.020 | -1.246 | 0.213 | -0.063 | 0.014 |
| L13.VIXCM30 | 0.0353 | 0.041 | 0.863 | 0.388 | -0.045 | 0.115 |
| L14.VIX | 0.0320 | 0.020 | 1.639 | 0.101 | -0.006 | 0.070 |
| L14.VIXCM30 | -0.1189 | 0.041 | -2.916 | 0.004 | -0.199 | -0.039 |
| L15.VIX | 0.0244 | 0.019 | 1.255 | 0.209 | -0.014 | 0.063 |
| L15.VIXCM30 | -0.0405 | 0.041 | -0.993 | 0.321 | -0.120 | 0.039 |
| L16.VIX | 0.0622 | 0.019 | 3.199 | 0.001 | 0.024 | 0.100 |
| L16.VIXCM30 | -0.0757 | 0.041 | -1.859 | 0.063 | -0.156 | 0.004 |
| L17.VIX | 0.0355 | 0.019 | 1.835 | 0.067 | -0.002 | 0.073 |
| L17.VIXCM30 | -0.0265 | 0.041 | -0.652 | 0.515 | -0.106 | 0.053 |
| L18.VIX | 0.0274 | 0.019 | 1.427 | 0.154 | -0.010 | 0.065 |
| L18.VIXCM30 | -0.0672 | 0.041 | -1.657 | 0.097 | -0.147 | 0.012 |
| L19.VIX | 0.0711 | 0.019 | 3.759 | 0.000 | 0.034 | 0.108 |
| L19.VIXCM30 | -0.1493 | 0.040 | -3.698 | 0.000 | -0.228 | -0.070 |
| L20.VIX | 0.0561 | 0.018 | 3.067 | 0.002 | 0.020 | 0.092 |
| L20.VIXCM30 | -0.0777 | 0.040 | -1.944 | 0.052 | -0.156 | 0.001 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | 0.0006 | 0.017 | 0.036 | 0.972 | -0.033 | 0.034 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | 0.0329 | 0.008 | 4.181 | 0.000 | 0.017 | 0.048 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| beta.1 | 1.0000 | 0 | 0 | 0.000 | 1.000 | 1.000 |
| beta.2 | -1.0485 | 0.024 | -44.311 | 0.000 | -1.095 | -1.002 |
| lin_trend | 0.0003 | 0.000 | 0.605 | 0.545 | -0.001 | 0.001 |
Picking up residuals for VIXCM30 series from the model:
# getting the lag to remove residual information
resid_vixcm30 = []
for i in range(len(f.resid)):
resid_vixcm30 += [f.resid[i][1]]
plt.plot(resid_vixcm30)
[<matplotlib.lines.Line2D at 0x1c2392dc50>]
run_ljung_box(resid_vixcm30, 80)
Ljung-Box Test
No information is left in the residuals!
Now we can check how the model is performing:
H = train[['VIX', 'VIXCM30', 'SPY']].dropna()
res = []
for date, row in test.iterrows():
v = VECM(H[['VIX', 'VIXCM30']], exog=H['SPY'], deterministic='li', k_ar_diff=20)
f = v.fit()
res += [f.predict(steps=1, exog_fc=test['SPY'])[0][1]]
H = H.append(row[['VIX', 'VIXCM30', 'SPY']])
H = H[1:].dropna()
plot_df = pd.DataFrame()
plot_df['Date'] = test.iloc[:, 0].values
plot_df['VIXCM30_actual'] = test.iloc[:, 4].values
plot_df['VIXCM30_predicted'] = res
plot_df.VIXCM30_actual.plot()
plot_df.VIXCM30_predicted.plot()
plt.title("VIXCM30 prediction through VECM model")
plt.legend()
<matplotlib.legend.Legend at 0x1c23938b38>
Looking closely into a random sample of the predictions:
plot_df.VIXCM30_actual[-100:].plot()
plot_df.VIXCM30_predicted[-100:].plot()
plt.title("VIXCM30 prediction through VECM model")
plt.legend()
<matplotlib.legend.Legend at 0x1c24364ba8>
ar_error2 = measure_error(plot_df['VIXCM30_actual'].values, plot_df['VIXCM30_predicted'].values, label='VECM_30')
print(ar_error2)
RMSE = 0.8218564880183119
RMSE_pcent = 0.04658378560777323
label = VECM_30
{'RMSE': 0.8218564880183119, 'RMSE_pcent': 0.04658378560777323, 'label': 'VECM_30'}
make_money(plot_df.VIXCM30_actual, plot_df.VIXCM30_predicted)
Total profit or loss: 17.6314 Number of correct days: 310 total incorrect days: 278
First let's take a look at the plotted data:
df_data.VIX.plot(label='VIX')
df_data.VIXCM60.plot(label='VIXCM60')
plt.legend()
<matplotlib.legend.Legend at 0x1c20485e48>
The two variables look cointegrated, but let's run a Johansen Test to be sure:
d = {'vix': df_data['VIX'],
'vixcm60':df_data['VIXCM60']
}
df = pd.DataFrame(d)
c = coint_johansen(df,1,63)
c
-------------------------------------------------- --> Trace Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 30.388 16.1619 18.3985 23.1485 r = 1 7.2609 2.7055 3.8415 6.6349 -------------------------------------------------- --> Eigen Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 23.1271 15.0006 17.1481 21.7465 r = 1 7.2609 2.7055 3.8415 6.6349 -------------------------------------------------- eigenvectors: [[ 0.48077253 -0.10256322] [-0.52658657 0.27381262]] -------------------------------------------------- eigenvalues: [0.00800084 0.00251885] --------------------------------------------------
<johansen_test.Holder at 0x113008c50>
Now we can build the model:
H = train[['VIX', 'VIXCM60']].dropna()
v = VECM(H, exog=train['SPY'],deterministic='li', k_ar_diff=20)
f = v.fit()
f.summary()
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | -0.0002 | 0.000 | -0.358 | 0.720 | -0.001 | 0.001 |
| L1.VIX | -0.3074 | 0.038 | -8.014 | 0.000 | -0.383 | -0.232 |
| L1.VIXCM60 | 0.3885 | 0.111 | 3.510 | 0.000 | 0.172 | 0.605 |
| L2.VIX | -0.1803 | 0.039 | -4.649 | 0.000 | -0.256 | -0.104 |
| L2.VIXCM60 | 0.1506 | 0.111 | 1.357 | 0.175 | -0.067 | 0.368 |
| L3.VIX | -0.0317 | 0.039 | -0.816 | 0.414 | -0.108 | 0.044 |
| L3.VIXCM60 | -0.1912 | 0.111 | -1.722 | 0.085 | -0.409 | 0.026 |
| L4.VIX | -0.0966 | 0.039 | -2.486 | 0.013 | -0.173 | -0.020 |
| L4.VIXCM60 | -0.0289 | 0.111 | -0.261 | 0.794 | -0.246 | 0.188 |
| L5.VIX | 0.0369 | 0.039 | 0.950 | 0.342 | -0.039 | 0.113 |
| L5.VIXCM60 | -0.3169 | 0.111 | -2.861 | 0.004 | -0.534 | -0.100 |
| L6.VIX | -0.0321 | 0.039 | -0.825 | 0.409 | -0.108 | 0.044 |
| L6.VIXCM60 | 0.0223 | 0.111 | 0.201 | 0.841 | -0.195 | 0.240 |
| L7.VIX | -0.0779 | 0.039 | -2.004 | 0.045 | -0.154 | -0.002 |
| L7.VIXCM60 | 0.0392 | 0.111 | 0.354 | 0.724 | -0.178 | 0.257 |
| L8.VIX | 0.0485 | 0.039 | 1.251 | 0.211 | -0.028 | 0.125 |
| L8.VIXCM60 | -0.1901 | 0.111 | -1.715 | 0.086 | -0.407 | 0.027 |
| L9.VIX | 0.0451 | 0.039 | 1.163 | 0.245 | -0.031 | 0.121 |
| L9.VIXCM60 | -0.1629 | 0.111 | -1.469 | 0.142 | -0.380 | 0.054 |
| L10.VIX | 0.1432 | 0.039 | 3.687 | 0.000 | 0.067 | 0.219 |
| L10.VIXCM60 | -0.1457 | 0.111 | -1.314 | 0.189 | -0.363 | 0.072 |
| L11.VIX | 0.0310 | 0.039 | 0.795 | 0.427 | -0.045 | 0.107 |
| L11.VIXCM60 | -0.0177 | 0.111 | -0.160 | 0.873 | -0.235 | 0.200 |
| L12.VIX | -0.0805 | 0.039 | -2.070 | 0.038 | -0.157 | -0.004 |
| L12.VIXCM60 | 0.2636 | 0.111 | 2.380 | 0.017 | 0.047 | 0.481 |
| L13.VIX | -0.0061 | 0.039 | -0.156 | 0.876 | -0.082 | 0.070 |
| L13.VIXCM60 | -0.0395 | 0.111 | -0.356 | 0.722 | -0.257 | 0.178 |
| L14.VIX | 0.1011 | 0.039 | 2.616 | 0.009 | 0.025 | 0.177 |
| L14.VIXCM60 | -0.3335 | 0.111 | -3.013 | 0.003 | -0.550 | -0.117 |
| L15.VIX | 0.0588 | 0.039 | 1.524 | 0.127 | -0.017 | 0.135 |
| L15.VIXCM60 | -0.1454 | 0.111 | -1.312 | 0.189 | -0.363 | 0.072 |
| L16.VIX | 0.1134 | 0.039 | 2.942 | 0.003 | 0.038 | 0.189 |
| L16.VIXCM60 | -0.1718 | 0.111 | -1.553 | 0.121 | -0.389 | 0.045 |
| L17.VIX | 0.0410 | 0.038 | 1.067 | 0.286 | -0.034 | 0.116 |
| L17.VIXCM60 | 0.0449 | 0.111 | 0.406 | 0.685 | -0.172 | 0.262 |
| L18.VIX | -0.0064 | 0.038 | -0.168 | 0.867 | -0.081 | 0.069 |
| L18.VIXCM60 | -0.0661 | 0.111 | -0.598 | 0.550 | -0.283 | 0.151 |
| L19.VIX | 0.1312 | 0.038 | 3.471 | 0.001 | 0.057 | 0.205 |
| L19.VIXCM60 | -0.3803 | 0.110 | -3.446 | 0.001 | -0.597 | -0.164 |
| L20.VIX | 0.0642 | 0.037 | 1.741 | 0.082 | -0.008 | 0.136 |
| L20.VIXCM60 | -0.1120 | 0.110 | -1.021 | 0.307 | -0.327 | 0.103 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | 0.0003 | 0.000 | 2.344 | 0.019 | 5.55e-05 | 0.001 |
| L1.VIX | -0.0794 | 0.013 | -6.094 | 0.000 | -0.105 | -0.054 |
| L1.VIXCM60 | 0.1398 | 0.038 | 3.722 | 0.000 | 0.066 | 0.213 |
| L2.VIX | -0.0521 | 0.013 | -3.958 | 0.000 | -0.078 | -0.026 |
| L2.VIXCM60 | 0.0745 | 0.038 | 1.977 | 0.048 | 0.001 | 0.148 |
| L3.VIX | -0.0023 | 0.013 | -0.174 | 0.862 | -0.028 | 0.024 |
| L3.VIXCM60 | -0.0637 | 0.038 | -1.691 | 0.091 | -0.138 | 0.010 |
| L4.VIX | -0.0309 | 0.013 | -2.343 | 0.019 | -0.057 | -0.005 |
| L4.VIXCM60 | 0.0140 | 0.038 | 0.372 | 0.710 | -0.060 | 0.088 |
| L5.VIX | -0.0048 | 0.013 | -0.361 | 0.718 | -0.031 | 0.021 |
| L5.VIXCM60 | -0.0818 | 0.038 | -2.176 | 0.030 | -0.156 | -0.008 |
| L6.VIX | 0.0054 | 0.013 | 0.407 | 0.684 | -0.020 | 0.031 |
| L6.VIXCM60 | -0.0359 | 0.038 | -0.954 | 0.340 | -0.110 | 0.038 |
| L7.VIX | -0.0083 | 0.013 | -0.633 | 0.527 | -0.034 | 0.018 |
| L7.VIXCM60 | -0.0190 | 0.038 | -0.505 | 0.613 | -0.093 | 0.055 |
| L8.VIX | 0.0031 | 0.013 | 0.236 | 0.813 | -0.023 | 0.029 |
| L8.VIXCM60 | -0.0286 | 0.038 | -0.760 | 0.447 | -0.102 | 0.045 |
| L9.VIX | 0.0059 | 0.013 | 0.449 | 0.653 | -0.020 | 0.032 |
| L9.VIXCM60 | -0.0260 | 0.038 | -0.690 | 0.490 | -0.100 | 0.048 |
| L10.VIX | 0.0527 | 0.013 | 3.999 | 0.000 | 0.027 | 0.079 |
| L10.VIXCM60 | -0.0686 | 0.038 | -1.821 | 0.069 | -0.142 | 0.005 |
| L11.VIX | 0.0095 | 0.013 | 0.719 | 0.472 | -0.016 | 0.035 |
| L11.VIXCM60 | -0.0117 | 0.038 | -0.310 | 0.757 | -0.085 | 0.062 |
| L12.VIX | -0.0376 | 0.013 | -2.845 | 0.004 | -0.063 | -0.012 |
| L12.VIXCM60 | 0.0905 | 0.038 | 2.406 | 0.016 | 0.017 | 0.164 |
| L13.VIX | -0.0093 | 0.013 | -0.706 | 0.480 | -0.035 | 0.017 |
| L13.VIXCM60 | 0.0183 | 0.038 | 0.487 | 0.626 | -0.055 | 0.092 |
| L14.VIX | 0.0246 | 0.013 | 1.873 | 0.061 | -0.001 | 0.050 |
| L14.VIXCM60 | -0.1169 | 0.038 | -3.110 | 0.002 | -0.191 | -0.043 |
| L15.VIX | 0.0174 | 0.013 | 1.324 | 0.185 | -0.008 | 0.043 |
| L15.VIXCM60 | -0.0452 | 0.038 | -1.201 | 0.230 | -0.119 | 0.029 |
| L16.VIX | 0.0538 | 0.013 | 4.111 | 0.000 | 0.028 | 0.079 |
| L16.VIXCM60 | -0.0924 | 0.038 | -2.460 | 0.014 | -0.166 | -0.019 |
| L17.VIX | 0.0284 | 0.013 | 2.176 | 0.030 | 0.003 | 0.054 |
| L17.VIXCM60 | -0.0115 | 0.038 | -0.306 | 0.759 | -0.085 | 0.062 |
| L18.VIX | 0.0143 | 0.013 | 1.100 | 0.271 | -0.011 | 0.040 |
| L18.VIXCM60 | -0.0613 | 0.038 | -1.633 | 0.102 | -0.135 | 0.012 |
| L19.VIX | 0.0482 | 0.013 | 3.757 | 0.000 | 0.023 | 0.073 |
| L19.VIXCM60 | -0.1233 | 0.037 | -3.292 | 0.001 | -0.197 | -0.050 |
| L20.VIX | 0.0367 | 0.013 | 2.932 | 0.003 | 0.012 | 0.061 |
| L20.VIXCM60 | -0.0680 | 0.037 | -1.825 | 0.068 | -0.141 | 0.005 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | -0.0033 | 0.012 | -0.278 | 0.781 | -0.027 | 0.020 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | 0.0157 | 0.004 | 3.845 | 0.000 | 0.008 | 0.024 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| beta.1 | 1.0000 | 0 | 0 | 0.000 | 1.000 | 1.000 |
| beta.2 | -1.0669 | 0.040 | -26.702 | 0.000 | -1.145 | -0.989 |
| lin_trend | 0.0004 | 0.001 | 0.493 | 0.622 | -0.001 | 0.002 |
# getting the lag to remove residual information
resid_vixcm60 = []
for i in range(len(f.resid)):
resid_vixcm60 += [f.resid[i][1]]
plt.plot(resid_vixcm60)
[<matplotlib.lines.Line2D at 0x1c269f4c88>]
run_ljung_box(resid_vixcm60, 80)
Ljung-Box Test
Now we can check how the model is performing:
H = train[['VIX', 'VIXCM60', 'SPY']].dropna()
res = []
for date, row in test.iterrows():
v = VECM(H[['VIX', 'VIXCM60']], exog=H['SPY'], deterministic='li', k_ar_diff=20)
f = v.fit()
res += [f.predict(steps=1, exog_fc=test['SPY'])[0][1]]
H = H.append(row[['VIX', 'VIXCM60', 'SPY']])
H = H[1:].dropna()
plot_df = pd.DataFrame()
plot_df['Date'] = test.iloc[:, 0].values
plot_df['VIXCM60_actual'] = test.iloc[:, 5].values
plot_df['VIXCM60_predicted'] = res
plot_df.VIXCM60_actual.plot()
plot_df.VIXCM60_predicted.plot()
plt.title("VIXCM60 prediction through VECM model")
plt.legend()
<matplotlib.legend.Legend at 0x1c26cf44a8>
ar_error2 = measure_error(plot_df['VIXCM60_actual'].values, plot_df['VIXCM60_predicted'].values, label='VECM_60')
print(ar_error2)
RMSE = 0.619971615010414
RMSE_pcent = 0.03360636343771486
label = VECM_60
{'RMSE': 0.619971615010414, 'RMSE_pcent': 0.03360636343771486, 'label': 'VECM_60'}
make_money(plot_df.VIXCM60_actual, plot_df.VIXCM60_predicted)
Total profit or loss: -8.6022 Number of correct days: 301 total incorrect days: 287
First let's take a look at the plotted data:
df_data.VIX.plot(label='VIX')
df_data.VIXCM90.plot(label='VIXCM90')
plt.legend()
<matplotlib.legend.Legend at 0x1c26d31908>
The two variables look cointegrated, but let's run a Johansen Test to be sure:
d = {'vix': df_data['VIX'],
'vixcm90':df_data['VIXCM90']
}
df = pd.DataFrame(d)
c = coint_johansen(df,1,63)
c
-------------------------------------------------- --> Trace Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 30.0345 16.1619 18.3985 23.1485 r = 1 7.0617 2.7055 3.8415 6.6349 -------------------------------------------------- --> Eigen Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 22.9729 15.0006 17.1481 21.7465 r = 1 7.0617 2.7055 3.8415 6.6349 -------------------------------------------------- eigenvectors: [[ 0.38643582 -0.07550673] [-0.42911003 0.25519026]] -------------------------------------------------- eigenvalues: [0.00794771 0.00244981] --------------------------------------------------
<johansen_test.Holder at 0x1c26db4dd8>
Now we can build the model:
H = train[['VIX', 'VIXCM90']].dropna()
v = VECM(H, deterministic='li',exog=train['SPY'], k_ar_diff=20)
f = v.fit()
f.summary()
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | -0.0003 | 0.000 | -0.687 | 0.492 | -0.001 | 0.001 |
| L1.VIX | -0.3300 | 0.036 | -9.224 | 0.000 | -0.400 | -0.260 |
| L1.VIXCM90 | 0.6012 | 0.123 | 4.897 | 0.000 | 0.361 | 0.842 |
| L2.VIX | -0.1274 | 0.036 | -3.499 | 0.000 | -0.199 | -0.056 |
| L2.VIXCM90 | -0.0456 | 0.123 | -0.370 | 0.711 | -0.287 | 0.196 |
| L3.VIX | -0.0130 | 0.036 | -0.356 | 0.722 | -0.084 | 0.059 |
| L3.VIXCM90 | -0.2710 | 0.123 | -2.197 | 0.028 | -0.513 | -0.029 |
| L4.VIX | -0.0855 | 0.036 | -2.346 | 0.019 | -0.157 | -0.014 |
| L4.VIXCM90 | -0.0476 | 0.123 | -0.386 | 0.700 | -0.289 | 0.194 |
| L5.VIX | 0.0202 | 0.036 | 0.553 | 0.580 | -0.051 | 0.092 |
| L5.VIXCM90 | -0.2812 | 0.123 | -2.282 | 0.023 | -0.523 | -0.040 |
| L6.VIX | -0.0125 | 0.037 | -0.341 | 0.733 | -0.084 | 0.059 |
| L6.VIXCM90 | -0.0312 | 0.123 | -0.253 | 0.800 | -0.273 | 0.211 |
| L7.VIX | -0.0775 | 0.037 | -2.120 | 0.034 | -0.149 | -0.006 |
| L7.VIXCM90 | 0.0171 | 0.124 | 0.138 | 0.890 | -0.225 | 0.259 |
| L8.VIX | 0.0228 | 0.037 | 0.623 | 0.533 | -0.049 | 0.094 |
| L8.VIXCM90 | -0.1166 | 0.123 | -0.945 | 0.345 | -0.359 | 0.125 |
| L9.VIX | 0.0387 | 0.037 | 1.059 | 0.290 | -0.033 | 0.110 |
| L9.VIXCM90 | -0.1416 | 0.123 | -1.147 | 0.251 | -0.383 | 0.100 |
| L10.VIX | 0.1620 | 0.037 | 4.434 | 0.000 | 0.090 | 0.234 |
| L10.VIXCM90 | -0.2312 | 0.124 | -1.871 | 0.061 | -0.473 | 0.011 |
| L11.VIX | 0.0099 | 0.037 | 0.269 | 0.788 | -0.062 | 0.082 |
| L11.VIXCM90 | 0.0697 | 0.123 | 0.565 | 0.572 | -0.172 | 0.312 |
| L12.VIX | -0.0693 | 0.037 | -1.890 | 0.059 | -0.141 | 0.003 |
| L12.VIXCM90 | 0.2686 | 0.123 | 2.180 | 0.029 | 0.027 | 0.510 |
| L13.VIX | 0.0069 | 0.037 | 0.188 | 0.851 | -0.065 | 0.079 |
| L13.VIXCM90 | -0.0892 | 0.123 | -0.724 | 0.469 | -0.331 | 0.153 |
| L14.VIX | 0.1127 | 0.036 | 3.099 | 0.002 | 0.041 | 0.184 |
| L14.VIXCM90 | -0.4459 | 0.123 | -3.623 | 0.000 | -0.687 | -0.205 |
| L15.VIX | 0.0487 | 0.036 | 1.340 | 0.180 | -0.023 | 0.120 |
| L15.VIXCM90 | -0.1032 | 0.123 | -0.839 | 0.401 | -0.344 | 0.138 |
| L16.VIX | 0.1249 | 0.036 | 3.450 | 0.001 | 0.054 | 0.196 |
| L16.VIXCM90 | -0.2495 | 0.123 | -2.035 | 0.042 | -0.490 | -0.009 |
| L17.VIX | 0.0165 | 0.036 | 0.457 | 0.648 | -0.054 | 0.087 |
| L17.VIXCM90 | 0.1683 | 0.123 | 1.372 | 0.170 | -0.072 | 0.409 |
| L18.VIX | -0.0113 | 0.036 | -0.314 | 0.754 | -0.082 | 0.059 |
| L18.VIXCM90 | -0.0790 | 0.123 | -0.645 | 0.519 | -0.319 | 0.161 |
| L19.VIX | 0.0603 | 0.036 | 1.696 | 0.090 | -0.009 | 0.130 |
| L19.VIXCM90 | -0.1572 | 0.123 | -1.283 | 0.200 | -0.397 | 0.083 |
| L20.VIX | 0.0730 | 0.035 | 2.111 | 0.035 | 0.005 | 0.141 |
| L20.VIXCM90 | -0.1793 | 0.121 | -1.478 | 0.140 | -0.417 | 0.059 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | 0.0002 | 0.000 | 2.007 | 0.045 | 5.75e-06 | 0.000 |
| L1.VIX | -0.0638 | 0.010 | -6.222 | 0.000 | -0.084 | -0.044 |
| L1.VIXCM90 | 0.1643 | 0.035 | 4.670 | 0.000 | 0.095 | 0.233 |
| L2.VIX | -0.0338 | 0.010 | -3.240 | 0.001 | -0.054 | -0.013 |
| L2.VIXCM90 | 0.0292 | 0.035 | 0.827 | 0.408 | -0.040 | 0.099 |
| L3.VIX | -0.0015 | 0.010 | -0.143 | 0.887 | -0.022 | 0.019 |
| L3.VIXCM90 | -0.0551 | 0.035 | -1.559 | 0.119 | -0.124 | 0.014 |
| L4.VIX | -0.0194 | 0.010 | -1.858 | 0.063 | -0.040 | 0.001 |
| L4.VIXCM90 | -0.0007 | 0.035 | -0.019 | 0.985 | -0.070 | 0.069 |
| L5.VIX | -0.0146 | 0.010 | -1.396 | 0.163 | -0.035 | 0.006 |
| L5.VIXCM90 | -0.0317 | 0.035 | -0.898 | 0.369 | -0.101 | 0.038 |
| L6.VIX | 0.0214 | 0.010 | 2.047 | 0.041 | 0.001 | 0.042 |
| L6.VIXCM90 | -0.0777 | 0.035 | -2.196 | 0.028 | -0.147 | -0.008 |
| L7.VIX | -0.0067 | 0.010 | -0.639 | 0.523 | -0.027 | 0.014 |
| L7.VIXCM90 | -0.0095 | 0.035 | -0.268 | 0.788 | -0.079 | 0.060 |
| L8.VIX | 0.0007 | 0.010 | 0.066 | 0.947 | -0.020 | 0.021 |
| L8.VIXCM90 | -0.0172 | 0.035 | -0.487 | 0.626 | -0.087 | 0.052 |
| L9.VIX | -0.0029 | 0.010 | -0.280 | 0.779 | -0.023 | 0.018 |
| L9.VIXCM90 | 0.0013 | 0.035 | 0.038 | 0.970 | -0.068 | 0.071 |
| L10.VIX | 0.0516 | 0.010 | 4.924 | 0.000 | 0.031 | 0.072 |
| L10.VIXCM90 | -0.0845 | 0.035 | -2.387 | 0.017 | -0.154 | -0.015 |
| L11.VIX | -0.0018 | 0.011 | -0.176 | 0.861 | -0.022 | 0.019 |
| L11.VIXCM90 | 0.0332 | 0.035 | 0.938 | 0.348 | -0.036 | 0.103 |
| L12.VIX | -0.0262 | 0.011 | -2.496 | 0.013 | -0.047 | -0.006 |
| L12.VIXCM90 | 0.0836 | 0.035 | 2.366 | 0.018 | 0.014 | 0.153 |
| L13.VIX | -0.0018 | 0.011 | -0.173 | 0.863 | -0.022 | 0.019 |
| L13.VIXCM90 | 0.0104 | 0.035 | 0.295 | 0.768 | -0.059 | 0.080 |
| L14.VIX | 0.0206 | 0.010 | 1.978 | 0.048 | 0.000 | 0.041 |
| L14.VIXCM90 | -0.1097 | 0.035 | -3.109 | 0.002 | -0.179 | -0.041 |
| L15.VIX | 0.0100 | 0.010 | 0.965 | 0.335 | -0.010 | 0.030 |
| L15.VIXCM90 | -0.0229 | 0.035 | -0.650 | 0.516 | -0.092 | 0.046 |
| L16.VIX | 0.0469 | 0.010 | 4.516 | 0.000 | 0.027 | 0.067 |
| L16.VIXCM90 | -0.0989 | 0.035 | -2.815 | 0.005 | -0.168 | -0.030 |
| L17.VIX | 0.0104 | 0.010 | 1.003 | 0.316 | -0.010 | 0.031 |
| L17.VIXCM90 | 0.0302 | 0.035 | 0.858 | 0.391 | -0.039 | 0.099 |
| L18.VIX | 0.0134 | 0.010 | 1.304 | 0.192 | -0.007 | 0.034 |
| L18.VIXCM90 | -0.0710 | 0.035 | -2.021 | 0.043 | -0.140 | -0.002 |
| L19.VIX | 0.0223 | 0.010 | 2.186 | 0.029 | 0.002 | 0.042 |
| L19.VIXCM90 | -0.0504 | 0.035 | -1.436 | 0.151 | -0.119 | 0.018 |
| L20.VIX | 0.0403 | 0.010 | 4.063 | 0.000 | 0.021 | 0.060 |
| L20.VIXCM90 | -0.1196 | 0.035 | -3.438 | 0.001 | -0.188 | -0.051 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | -0.0077 | 0.010 | -0.779 | 0.436 | -0.027 | 0.012 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | 0.0096 | 0.003 | 3.397 | 0.001 | 0.004 | 0.015 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| beta.1 | 1.0000 | 0 | 0 | 0.000 | 1.000 | 1.000 |
| beta.2 | -1.0723 | 0.055 | -19.666 | 0.000 | -1.179 | -0.965 |
| lin_trend | 0.0005 | 0.001 | 0.446 | 0.656 | -0.002 | 0.002 |
# getting the lag to remove residual information
resid_vixcm90 = []
for i in range(len(f.resid)):
resid_vixcm90 += [f.resid[i][1]]
plt.plot(resid_vixcm90)
[<matplotlib.lines.Line2D at 0x1c2772deb8>]
run_ljung_box(resid_vixcm90, 80)
Ljung-Box Test
Now we can check how the model is performing:
H = train[['VIX', 'VIXCM90', 'SPY']].dropna()
res = []
for date, row in test.iterrows():
v = VECM(H[['VIX', 'VIXCM90']], exog=H['SPY'], deterministic='li', k_ar_diff=20)
f = v.fit()
res += [f.predict(steps=1, exog_fc=test['SPY'])[0][1]]
H = H.append(row[['VIX', 'VIXCM90', 'SPY']])
H = H[1:].dropna()
plot_df = pd.DataFrame()
plot_df['Date'] = test.iloc[:,0].values
plot_df['VIXCM90_actual'] = test.iloc[:, 6].values
plot_df['VIXCM90_predicted'] = res
plot_df.VIXCM90_actual.plot()
plot_df.VIXCM90_predicted.plot()
plt.title("VIXCM90 prediction through VECM model")
plt.legend()
<matplotlib.legend.Legend at 0x1c278c5400>
ar_error2 = measure_error(plot_df['VIXCM90_actual'].values, plot_df['VIXCM90_predicted'].values, label='VECM_90')
print(ar_error2)
RMSE = 0.5184302696563623
RMSE_pcent = 0.027288800658595697
label = VECM_90
{'RMSE': 0.5184302696563623, 'RMSE_pcent': 0.027288800658595697, 'label': 'VECM_90'}
make_money(plot_df.VIXCM90_actual, plot_df.VIXCM90_predicted)
Total profit or loss: -13.8828 Number of correct days: 286 total incorrect days: 302
First let's take a look at the plotted data:
df_data.VIX.plot(label='VIX')
df_data.VIXCM120.plot(label='VIXCM120')
plt.legend()
<matplotlib.legend.Legend at 0x1c27e4de10>
The two variables look cointegrated, but let's run a Johansen Test to be sure:
d = {'vix': df_data['VIX'],
'vixcm120':df_data['VIXCM120']
}
df = pd.DataFrame(d)
c = coint_johansen(df,1,63)
c
-------------------------------------------------- --> Trace Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 26.3306 16.1619 18.3985 23.1485 r = 1 6.8779 2.7055 3.8415 6.6349 -------------------------------------------------- --> Eigen Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 19.4527 15.0006 17.1481 21.7465 r = 1 6.8779 2.7055 3.8415 6.6349 -------------------------------------------------- eigenvectors: [[ 0.3257479 -0.08536839] [-0.35188173 0.27314798]] -------------------------------------------------- eigenvalues: [0.00673399 0.00238613] --------------------------------------------------
<johansen_test.Holder at 0x10bdc7e10>
Now we can build the model:
H = train[['VIX', 'VIXCM120']].dropna()
v = VECM(H, deterministic='li', exog=train['SPY'], k_ar_diff=20)
f = v.fit()
f.summary()
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | -0.0005 | 0.000 | -1.090 | 0.276 | -0.001 | 0.000 |
| L1.VIX | -0.3271 | 0.033 | -10.048 | 0.000 | -0.391 | -0.263 |
| L1.VIXCM120 | 0.7426 | 0.124 | 5.999 | 0.000 | 0.500 | 0.985 |
| L2.VIX | -0.1192 | 0.033 | -3.579 | 0.000 | -0.185 | -0.054 |
| L2.VIXCM120 | -0.0553 | 0.125 | -0.443 | 0.657 | -0.300 | 0.189 |
| L3.VIX | -0.0321 | 0.033 | -0.964 | 0.335 | -0.097 | 0.033 |
| L3.VIXCM120 | -0.2460 | 0.125 | -1.972 | 0.049 | -0.490 | -0.002 |
| L4.VIX | -0.1220 | 0.033 | -3.658 | 0.000 | -0.187 | -0.057 |
| L4.VIXCM120 | 0.1987 | 0.125 | 1.593 | 0.111 | -0.046 | 0.443 |
| L5.VIX | 0.0409 | 0.033 | 1.223 | 0.221 | -0.025 | 0.106 |
| L5.VIXCM120 | -0.4333 | 0.125 | -3.475 | 0.001 | -0.678 | -0.189 |
| L6.VIX | 0.0269 | 0.033 | 0.805 | 0.421 | -0.039 | 0.092 |
| L6.VIXCM120 | -0.2042 | 0.125 | -1.635 | 0.102 | -0.449 | 0.041 |
| L7.VIX | -0.0694 | 0.033 | -2.074 | 0.038 | -0.135 | -0.004 |
| L7.VIXCM120 | 0.0257 | 0.125 | 0.205 | 0.837 | -0.219 | 0.271 |
| L8.VIX | 0.0205 | 0.033 | 0.615 | 0.539 | -0.045 | 0.086 |
| L8.VIXCM120 | -0.1645 | 0.125 | -1.317 | 0.188 | -0.409 | 0.080 |
| L9.VIX | 0.0229 | 0.033 | 0.685 | 0.494 | -0.043 | 0.088 |
| L9.VIXCM120 | -0.0369 | 0.125 | -0.295 | 0.768 | -0.282 | 0.208 |
| L10.VIX | 0.1667 | 0.033 | 4.995 | 0.000 | 0.101 | 0.232 |
| L10.VIXCM120 | -0.3038 | 0.125 | -2.434 | 0.015 | -0.548 | -0.059 |
| L11.VIX | 0.0147 | 0.034 | 0.437 | 0.662 | -0.051 | 0.080 |
| L11.VIXCM120 | 0.0343 | 0.125 | 0.275 | 0.783 | -0.210 | 0.279 |
| L12.VIX | -0.0732 | 0.033 | -2.184 | 0.029 | -0.139 | -0.008 |
| L12.VIXCM120 | 0.3736 | 0.125 | 2.994 | 0.003 | 0.129 | 0.618 |
| L13.VIX | 0.0361 | 0.033 | 1.079 | 0.281 | -0.030 | 0.102 |
| L13.VIXCM120 | -0.2547 | 0.125 | -2.037 | 0.042 | -0.500 | -0.010 |
| L14.VIX | 0.0898 | 0.033 | 2.701 | 0.007 | 0.025 | 0.155 |
| L14.VIXCM120 | -0.4045 | 0.125 | -3.241 | 0.001 | -0.649 | -0.160 |
| L15.VIX | 0.0180 | 0.033 | 0.542 | 0.588 | -0.047 | 0.083 |
| L15.VIXCM120 | 0.0403 | 0.125 | 0.322 | 0.747 | -0.204 | 0.285 |
| L16.VIX | 0.1174 | 0.033 | 3.548 | 0.000 | 0.053 | 0.182 |
| L16.VIXCM120 | -0.2830 | 0.124 | -2.274 | 0.023 | -0.527 | -0.039 |
| L17.VIX | 0.0158 | 0.033 | 0.478 | 0.633 | -0.049 | 0.081 |
| L17.VIXCM120 | 0.1993 | 0.125 | 1.600 | 0.110 | -0.045 | 0.443 |
| L18.VIX | -0.0484 | 0.033 | -1.469 | 0.142 | -0.113 | 0.016 |
| L18.VIXCM120 | 0.0854 | 0.125 | 0.685 | 0.493 | -0.159 | 0.330 |
| L19.VIX | 0.0501 | 0.033 | 1.536 | 0.125 | -0.014 | 0.114 |
| L19.VIXCM120 | -0.1705 | 0.124 | -1.370 | 0.171 | -0.414 | 0.073 |
| L20.VIX | 0.0710 | 0.032 | 2.240 | 0.025 | 0.009 | 0.133 |
| L20.VIXCM120 | -0.1641 | 0.123 | -1.332 | 0.183 | -0.406 | 0.077 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | 0.0002 | 0.000 | 1.790 | 0.073 | -1.87e-05 | 0.000 |
| L1.VIX | -0.0467 | 0.008 | -5.542 | 0.000 | -0.063 | -0.030 |
| L1.VIXCM120 | 0.1360 | 0.032 | 4.240 | 0.000 | 0.073 | 0.199 |
| L2.VIX | -0.0291 | 0.009 | -3.369 | 0.001 | -0.046 | -0.012 |
| L2.VIXCM120 | 0.0370 | 0.032 | 1.146 | 0.252 | -0.026 | 0.100 |
| L3.VIX | -0.0107 | 0.009 | -1.242 | 0.214 | -0.028 | 0.006 |
| L3.VIXCM120 | -0.0474 | 0.032 | -1.468 | 0.142 | -0.111 | 0.016 |
| L4.VIX | -0.0248 | 0.009 | -2.870 | 0.004 | -0.042 | -0.008 |
| L4.VIXCM120 | 0.0532 | 0.032 | 1.648 | 0.099 | -0.010 | 0.117 |
| L5.VIX | -0.0048 | 0.009 | -0.556 | 0.578 | -0.022 | 0.012 |
| L5.VIXCM120 | -0.0798 | 0.032 | -2.470 | 0.014 | -0.143 | -0.016 |
| L6.VIX | 0.0228 | 0.009 | 2.630 | 0.009 | 0.006 | 0.040 |
| L6.VIXCM120 | -0.0925 | 0.032 | -2.859 | 0.004 | -0.156 | -0.029 |
| L7.VIX | 0.0002 | 0.009 | 0.020 | 0.984 | -0.017 | 0.017 |
| L7.VIXCM120 | -0.0177 | 0.032 | -0.546 | 0.585 | -0.081 | 0.046 |
| L8.VIX | -0.0004 | 0.009 | -0.043 | 0.966 | -0.017 | 0.017 |
| L8.VIXCM120 | -0.0228 | 0.032 | -0.705 | 0.481 | -0.086 | 0.041 |
| L9.VIX | -0.0007 | 0.009 | -0.076 | 0.939 | -0.018 | 0.016 |
| L9.VIXCM120 | -0.0008 | 0.032 | -0.026 | 0.979 | -0.064 | 0.063 |
| L10.VIX | 0.0393 | 0.009 | 4.543 | 0.000 | 0.022 | 0.056 |
| L10.VIXCM120 | -0.0607 | 0.032 | -1.877 | 0.061 | -0.124 | 0.003 |
| L11.VIX | 0.0045 | 0.009 | 0.522 | 0.602 | -0.012 | 0.022 |
| L11.VIXCM120 | 0.0263 | 0.032 | 0.813 | 0.416 | -0.037 | 0.090 |
| L12.VIX | -0.0189 | 0.009 | -2.175 | 0.030 | -0.036 | -0.002 |
| L12.VIXCM120 | 0.0922 | 0.032 | 2.851 | 0.004 | 0.029 | 0.156 |
| L13.VIX | 0.0061 | 0.009 | 0.702 | 0.483 | -0.011 | 0.023 |
| L13.VIXCM120 | -0.0146 | 0.032 | -0.452 | 0.651 | -0.078 | 0.049 |
| L14.VIX | 0.0194 | 0.009 | 2.254 | 0.024 | 0.003 | 0.036 |
| L14.VIXCM120 | -0.1080 | 0.032 | -3.340 | 0.001 | -0.171 | -0.045 |
| L15.VIX | 0.0059 | 0.009 | 0.682 | 0.495 | -0.011 | 0.023 |
| L15.VIXCM120 | 0.0272 | 0.032 | 0.841 | 0.401 | -0.036 | 0.091 |
| L16.VIX | 0.0367 | 0.009 | 4.282 | 0.000 | 0.020 | 0.054 |
| L16.VIXCM120 | -0.0892 | 0.032 | -2.767 | 0.006 | -0.152 | -0.026 |
| L17.VIX | 0.0109 | 0.009 | 1.277 | 0.201 | -0.006 | 0.028 |
| L17.VIXCM120 | 0.0309 | 0.032 | 0.957 | 0.339 | -0.032 | 0.094 |
| L18.VIX | 0.0019 | 0.009 | 0.225 | 0.822 | -0.015 | 0.019 |
| L18.VIXCM120 | -0.0118 | 0.032 | -0.366 | 0.714 | -0.075 | 0.051 |
| L19.VIX | 0.0122 | 0.008 | 1.448 | 0.148 | -0.004 | 0.029 |
| L19.VIXCM120 | -0.0513 | 0.032 | -1.590 | 0.112 | -0.114 | 0.012 |
| L20.VIX | 0.0284 | 0.008 | 3.460 | 0.001 | 0.012 | 0.045 |
| L20.VIXCM120 | -0.0975 | 0.032 | -3.052 | 0.002 | -0.160 | -0.035 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | -0.0122 | 0.009 | -1.385 | 0.166 | -0.030 | 0.005 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | 0.0068 | 0.002 | 2.986 | 0.003 | 0.002 | 0.011 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| beta.1 | 1.0000 | 0 | 0 | 0.000 | 1.000 | 1.000 |
| beta.2 | -1.0824 | 0.068 | -16.028 | 0.000 | -1.215 | -0.950 |
| lin_trend | 0.0006 | 0.001 | 0.472 | 0.637 | -0.002 | 0.003 |
# getting the lag to remove residual information
resid_vixcm120 = []
for i in range(len(f.resid)):
resid_vixcm120 += [f.resid[i][1]]
plt.plot(resid_vixcm120)
[<matplotlib.lines.Line2D at 0x1c204d5cc0>]
run_ljung_box(resid_vixcm120, 80)
Ljung-Box Test
Now we can check how the model is performing:
H = train[['VIX', 'VIXCM120', 'SPY']].dropna()
res = []
for date, row in test.iterrows():
v = VECM(H[['VIX', 'VIXCM120']], exog=H['SPY'], deterministic='li', k_ar_diff=20)
f = v.fit()
res += [f.predict(steps=1, exog_fc=test['SPY'])[0][1]]
H = H.append(row[['VIX', 'VIXCM120', 'SPY']])
H = H[1:].dropna()
plot_df = pd.DataFrame()
plot_df['Date'] = test.iloc[:,0].values
plot_df['VIXCM120_actual'] = test.iloc[:,7].values
plot_df['VIXCM120_predicted'] = res
plot_df.VIXCM120_actual.plot()
plot_df.VIXCM120_predicted.plot()
plt.title("VIXCM120 prediction through VECM model")
plt.legend()
<matplotlib.legend.Legend at 0x1c27983eb8>
ar_error2 = measure_error(plot_df['VIXCM120_actual'].values, plot_df['VIXCM120_predicted'].values, label='VECM')
print(ar_error2)
RMSE = 0.4581289208624565
RMSE_pcent = 0.023545101076493956
label = VECM
{'RMSE': 0.4581289208624565, 'RMSE_pcent': 0.023545101076493956, 'label': 'VECM'}
make_money(plot_df.VIXCM120_actual, plot_df.VIXCM120_predicted)
Total profit or loss: -7.5644 Number of correct days: 294 total incorrect days: 294
First let's take a look at the plotted data:
df_data.VIX.plot(label='VIX')
df_data.VIXCM150.plot(label='VIXCM150')
plt.legend()
<matplotlib.legend.Legend at 0x1c204a8fd0>
The two variables look cointegrated, but let's run a Johansen Test to be sure:
d = {'vix': df_data['VIX'],
'vixcm150':df_data['VIXCM150']
}
df = pd.DataFrame(d)
c = coint_johansen(df,1,63)
c
-------------------------------------------------- --> Trace Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 25.2241 16.1619 18.3985 23.1485 r = 1 6.6399 2.7055 3.8415 6.6349 -------------------------------------------------- --> Eigen Statistics variable statistic Crit-90% Crit-95% Crit-99% r = 0 18.5842 15.0006 17.1481 21.7465 r = 1 6.6399 2.7055 3.8415 6.6349 -------------------------------------------------- eigenvectors: [[ 0.2736747 -0.08686034] [-0.28087418 0.27910247]] -------------------------------------------------- eigenvalues: [0.00643429 0.00230366] --------------------------------------------------
<johansen_test.Holder at 0x1c27c45470>
Now we can build the model:
H = train[['VIX', 'VIXCM150']].dropna()
v = VECM(H, deterministic='li', exog=train['SPY'], k_ar_diff=20)
f = v.fit()
f.summary()
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | -0.0006 | 0.000 | -1.408 | 0.159 | -0.001 | 0.000 |
| L1.VIX | -0.2774 | 0.028 | -9.962 | 0.000 | -0.332 | -0.223 |
| L1.VIXCM150 | 0.6254 | 0.107 | 5.841 | 0.000 | 0.416 | 0.835 |
| L2.VIX | -0.1153 | 0.029 | -4.033 | 0.000 | -0.171 | -0.059 |
| L2.VIXCM150 | -0.0241 | 0.108 | -0.222 | 0.824 | -0.236 | 0.188 |
| L3.VIX | -0.0570 | 0.029 | -1.995 | 0.046 | -0.113 | -0.001 |
| L3.VIXCM150 | -0.1497 | 0.108 | -1.386 | 0.166 | -0.361 | 0.062 |
| L4.VIX | -0.1193 | 0.029 | -4.166 | 0.000 | -0.175 | -0.063 |
| L4.VIXCM150 | 0.2803 | 0.108 | 2.599 | 0.009 | 0.069 | 0.492 |
| L5.VIX | 0.0300 | 0.029 | 1.044 | 0.297 | -0.026 | 0.086 |
| L5.VIXCM150 | -0.4177 | 0.108 | -3.868 | 0.000 | -0.629 | -0.206 |
| L6.VIX | 0.0408 | 0.029 | 1.423 | 0.155 | -0.015 | 0.097 |
| L6.VIXCM150 | -0.3165 | 0.108 | -2.930 | 0.003 | -0.528 | -0.105 |
| L7.VIX | -0.0588 | 0.029 | -2.054 | 0.040 | -0.115 | -0.003 |
| L7.VIXCM150 | 0.0207 | 0.108 | 0.192 | 0.848 | -0.191 | 0.233 |
| L8.VIX | 0.0133 | 0.029 | 0.466 | 0.641 | -0.043 | 0.069 |
| L8.VIXCM150 | -0.1734 | 0.108 | -1.605 | 0.109 | -0.385 | 0.038 |
| L9.VIX | 0.0109 | 0.029 | 0.383 | 0.702 | -0.045 | 0.067 |
| L9.VIXCM150 | 0.0408 | 0.108 | 0.377 | 0.706 | -0.171 | 0.253 |
| L10.VIX | 0.1531 | 0.028 | 5.374 | 0.000 | 0.097 | 0.209 |
| L10.VIXCM150 | -0.2813 | 0.108 | -2.609 | 0.009 | -0.493 | -0.070 |
| L11.VIX | 0.0278 | 0.029 | 0.973 | 0.330 | -0.028 | 0.084 |
| L11.VIXCM150 | -0.0432 | 0.108 | -0.401 | 0.689 | -0.255 | 0.168 |
| L12.VIX | -0.0454 | 0.029 | -1.589 | 0.112 | -0.101 | 0.011 |
| L12.VIXCM150 | 0.3075 | 0.108 | 2.849 | 0.004 | 0.096 | 0.519 |
| L13.VIX | 0.0301 | 0.029 | 1.051 | 0.293 | -0.026 | 0.086 |
| L13.VIXCM150 | -0.2622 | 0.108 | -2.426 | 0.015 | -0.474 | -0.050 |
| L14.VIX | 0.0487 | 0.028 | 1.711 | 0.087 | -0.007 | 0.104 |
| L14.VIXCM150 | -0.2528 | 0.108 | -2.338 | 0.019 | -0.465 | -0.041 |
| L15.VIX | 0.0039 | 0.028 | 0.137 | 0.891 | -0.052 | 0.059 |
| L15.VIXCM150 | 0.0855 | 0.108 | 0.790 | 0.429 | -0.126 | 0.297 |
| L16.VIX | 0.0885 | 0.028 | 3.131 | 0.002 | 0.033 | 0.144 |
| L16.VIXCM150 | -0.2076 | 0.108 | -1.922 | 0.055 | -0.419 | 0.004 |
| L17.VIX | 0.0306 | 0.028 | 1.087 | 0.277 | -0.025 | 0.086 |
| L17.VIXCM150 | 0.1457 | 0.108 | 1.349 | 0.177 | -0.066 | 0.357 |
| L18.VIX | -0.0599 | 0.028 | -2.126 | 0.033 | -0.115 | -0.005 |
| L18.VIXCM150 | 0.1864 | 0.108 | 1.725 | 0.084 | -0.025 | 0.398 |
| L19.VIX | 0.0328 | 0.028 | 1.172 | 0.241 | -0.022 | 0.088 |
| L19.VIXCM150 | -0.1139 | 0.108 | -1.054 | 0.292 | -0.326 | 0.098 |
| L20.VIX | 0.0691 | 0.027 | 2.544 | 0.011 | 0.016 | 0.122 |
| L20.VIXCM150 | -0.1710 | 0.107 | -1.596 | 0.110 | -0.381 | 0.039 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| exog1 | 0.0002 | 0.000 | 1.751 | 0.080 | -2.25e-05 | 0.000 |
| L1.VIX | -0.0193 | 0.007 | -2.709 | 0.007 | -0.033 | -0.005 |
| L1.VIXCM150 | 0.0174 | 0.027 | 0.636 | 0.525 | -0.036 | 0.071 |
| L2.VIX | -0.0193 | 0.007 | -2.638 | 0.008 | -0.034 | -0.005 |
| L2.VIXCM150 | 0.0083 | 0.028 | 0.299 | 0.765 | -0.046 | 0.062 |
| L3.VIX | -0.0215 | 0.007 | -2.951 | 0.003 | -0.036 | -0.007 |
| L3.VIXCM150 | -0.0280 | 0.028 | -1.015 | 0.310 | -0.082 | 0.026 |
| L4.VIX | -0.0195 | 0.007 | -2.661 | 0.008 | -0.034 | -0.005 |
| L4.VIXCM150 | 0.0519 | 0.028 | 1.882 | 0.060 | -0.002 | 0.106 |
| L5.VIX | -0.0031 | 0.007 | -0.418 | 0.676 | -0.017 | 0.011 |
| L5.VIXCM150 | -0.0917 | 0.028 | -3.323 | 0.001 | -0.146 | -0.038 |
| L6.VIX | 0.0149 | 0.007 | 2.040 | 0.041 | 0.001 | 0.029 |
| L6.VIXCM150 | -0.0746 | 0.028 | -2.705 | 0.007 | -0.129 | -0.021 |
| L7.VIX | 0.0042 | 0.007 | 0.574 | 0.566 | -0.010 | 0.019 |
| L7.VIXCM150 | -0.0227 | 0.028 | -0.823 | 0.411 | -0.077 | 0.031 |
| L8.VIX | 0.0029 | 0.007 | 0.394 | 0.694 | -0.011 | 0.017 |
| L8.VIXCM150 | -0.0443 | 0.028 | -1.606 | 0.108 | -0.098 | 0.010 |
| L9.VIX | 0.0022 | 0.007 | 0.304 | 0.761 | -0.012 | 0.017 |
| L9.VIXCM150 | -0.0149 | 0.028 | -0.539 | 0.590 | -0.069 | 0.039 |
| L10.VIX | 0.0260 | 0.007 | 3.569 | 0.000 | 0.012 | 0.040 |
| L10.VIXCM150 | -0.0232 | 0.028 | -0.844 | 0.399 | -0.077 | 0.031 |
| L11.VIX | 0.0134 | 0.007 | 1.833 | 0.067 | -0.001 | 0.028 |
| L11.VIXCM150 | 0.0163 | 0.028 | 0.592 | 0.554 | -0.038 | 0.070 |
| L12.VIX | -0.0023 | 0.007 | -0.319 | 0.749 | -0.017 | 0.012 |
| L12.VIXCM150 | 0.0378 | 0.028 | 1.372 | 0.170 | -0.016 | 0.092 |
| L13.VIX | 0.0086 | 0.007 | 1.176 | 0.240 | -0.006 | 0.023 |
| L13.VIXCM150 | 0.0028 | 0.028 | 0.101 | 0.920 | -0.051 | 0.057 |
| L14.VIX | 0.0094 | 0.007 | 1.298 | 0.194 | -0.005 | 0.024 |
| L14.VIXCM150 | -0.0544 | 0.028 | -1.969 | 0.049 | -0.109 | -0.000 |
| L15.VIX | 0.0072 | 0.007 | 0.990 | 0.322 | -0.007 | 0.021 |
| L15.VIXCM150 | 0.0478 | 0.028 | 1.730 | 0.084 | -0.006 | 0.102 |
| L16.VIX | 0.0182 | 0.007 | 2.526 | 0.012 | 0.004 | 0.032 |
| L16.VIXCM150 | -0.0231 | 0.028 | -0.837 | 0.403 | -0.077 | 0.031 |
| L17.VIX | 0.0165 | 0.007 | 2.289 | 0.022 | 0.002 | 0.031 |
| L17.VIXCM150 | -0.0028 | 0.028 | -0.101 | 0.919 | -0.057 | 0.051 |
| L18.VIX | 0.0026 | 0.007 | 0.368 | 0.713 | -0.011 | 0.017 |
| L18.VIXCM150 | 0.0205 | 0.028 | 0.744 | 0.457 | -0.034 | 0.075 |
| L19.VIX | 0.0042 | 0.007 | 0.583 | 0.560 | -0.010 | 0.018 |
| L19.VIXCM150 | -0.0460 | 0.028 | -1.668 | 0.095 | -0.100 | 0.008 |
| L20.VIX | 0.0165 | 0.007 | 2.382 | 0.017 | 0.003 | 0.030 |
| L20.VIXCM150 | -0.0694 | 0.027 | -2.537 | 0.011 | -0.123 | -0.016 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | -0.0149 | 0.008 | -1.891 | 0.059 | -0.030 | 0.001 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ec1 | 0.0059 | 0.002 | 2.918 | 0.004 | 0.002 | 0.010 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| beta.1 | 1.0000 | 0 | 0 | 0.000 | 1.000 | 1.000 |
| beta.2 | -1.0963 | 0.087 | -12.665 | 0.000 | -1.266 | -0.927 |
| lin_trend | 0.0008 | 0.002 | 0.544 | 0.586 | -0.002 | 0.004 |
# getting the lag to remove residual information
resid_vixcm150 = []
for i in range(len(f.resid)):
resid_vixcm150 += [f.resid[i][1]]
plt.plot(resid_vixcm150)
[<matplotlib.lines.Line2D at 0x1c1ccde358>]
run_ljung_box(resid_vixcm150, 80)
Ljung-Box Test
Now we can check how the model is performing:
H = train[['VIX', 'VIXCM150', 'SPY']].dropna()
res = []
for date, row in test.iterrows():
v = VECM(H[['VIX', 'VIXCM150']], exog=H['SPY'], deterministic='li', k_ar_diff=20)
f = v.fit()
res += [f.predict(steps=1, exog_fc=test['SPY'])[0][1]]
H = H.append(row[['VIX', 'VIXCM150', 'SPY']])
H = H[1:].dropna()
plot_df = pd.DataFrame()
plot_df['Date'] = test.iloc[:, 0].values
plot_df['VIXCM150_actual'] = test.iloc[:,8].values
plot_df['VIXCM150_predicted'] = res
plot_df.VIXCM150_actual.plot()
plot_df.VIXCM150_predicted.plot()
plt.title("VIXCM150 prediction through VECM model")
plt.legend()
<matplotlib.legend.Legend at 0x1c27cc8748>
ar_error2 = measure_error(plot_df['VIXCM150_actual'].values, plot_df['VIXCM150_predicted'].values, label='VECM')
print(ar_error2)
RMSE = 0.39943060263751484
RMSE_pcent = 0.020057050701287188
label = VECM
{'RMSE': 0.39943060263751484, 'RMSE_pcent': 0.020057050701287188, 'label': 'VECM'}
make_money(plot_df.VIXCM150_actual, plot_df.VIXCM150_predicted)
Total profit or loss: -5.7534 Number of correct days: 290 total incorrect days: 298
| Model | RMSE | PnL |
|---|---|---|
| VIXCM30 | 0.8218564 | 17.6314 |
| VIXCM60 | 0.6199716 | -8.6022 |
| VIXCM90 | 0.5184302 | -13.8828 |
| VIXCM120 | 0.4581289 | -7.5644 |
| VIXCM150 | 0.3994306 | -5.7534 |
For our AR and ARMA models we will try to forecast the premium of each VIX future. Since the price of these futures is the VIX + a premium (cost of carry) we can create a variable for the premium by subtracting the VIX from the price of any given future. We do this for each future below. After running the model on these premium prices, we will add the most recent VIX price back to the premium to find the VIXCM future price for the next day.
df_data.set_index('Date', inplace= True)
df_data.head()
| SPY | RealVol | VIX | VIXCM30 | VIXCM60 | VIXCM90 | VIXCM120 | VIXCM150 | spy_ret | variance | |
|---|---|---|---|---|---|---|---|---|---|---|
| Date | ||||||||||
| 2005-03-23 | 92.578594 | 9.120305 | 14.06 | 14.506154 | 15.065909 | 15.454545 | 15.739375 | 15.955000 | NaN | NaN |
| 2005-03-24 | 92.689358 | 6.457921 | 13.42 | 14.225263 | 14.894773 | 15.242500 | 15.524375 | 15.768125 | 0.001196 | 6.802115e-07 |
| 2005-03-28 | 92.823878 | 5.578653 | 13.75 | 14.398649 | 14.993636 | 15.320909 | 15.586250 | 15.820625 | 0.001451 | 1.165579e-06 |
| 2005-03-29 | 92.206709 | 9.290422 | 14.49 | 14.931667 | 15.219773 | 15.458409 | 15.667813 | 15.864688 | -0.006649 | 4.928736e-05 |
| 2005-03-30 | 93.512316 | 7.313855 | 13.64 | 14.428571 | 15.091364 | 15.398182 | 15.600312 | 15.764375 | 0.014160 | 1.901057e-04 |
df_data['VIXCM30_Premium'] = (df_data['VIXCM30']-df_data['VIX'])
df_data['VIXCM60_Premium'] = (df_data['VIXCM60']-df_data['VIX'])
df_data['VIXCM90_Premium'] = (df_data['VIXCM90']-df_data['VIX'])
df_data['VIXCM120_Premium'] = (df_data['VIXCM120']-df_data['VIX'])
df_data['VIXCM150_Premium'] = (df_data['VIXCM150']-df_data['VIX'])
VIXCM30 Premium
df_data.VIXCM30_Premium.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c19c1a828>
run_ad_fuller(df_data.VIXCM30_Premium)
ADF Statistic: -6.546746 p-value: 0.000000 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
It is Stationary!
VIXCM60 Premium
df_data.VIXCM60_Premium.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c19f4fa20>
run_ad_fuller(df_data.VIXCM60_Premium)
ADF Statistic: -5.490771 p-value: 0.000002 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
It is Stationary!
VIXCM90 Premium
df_data.VIXCM90_Premium.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x10acb0a20>
run_ad_fuller(df_data.VIXCM90_Premium)
ADF Statistic: -5.085105 p-value: 0.000015 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
It is Stationary!
VIXCM120 Premium
df_data.VIXCM120_Premium.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c19d34cc0>
run_ad_fuller(df_data.VIXCM120_Premium.dropna())
ADF Statistic: -4.742800 p-value: 0.000070 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
It is Stationary!
VIXCM150 Premium
df_data.VIXCM150_Premium.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c1d4eb908>
run_ad_fuller(df_data.VIXCM150_Premium.dropna())
ADF Statistic: -4.555457 p-value: 0.000156 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
It is Stationary!
First we will split the data into test and training data:
train,test = train_test_split(df_data, test_size=0.2, shuffle=False)
print('train: ',len(train))
print('test : ',len(test))
train.tail(5)
train: 2354 test : 589
| SPY | RealVol | VIX | VIXCM30 | VIXCM60 | VIXCM90 | VIXCM120 | VIXCM150 | spy_ret | variance | VIXCM30_Premium | VIXCM60_Premium | VIXCM90_Premium | VIXCM120_Premium | VIXCM150_Premium | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| Date | |||||||||||||||
| 2014-07-23 | 189.716544 | 3.895231 | 11.52 | 13.471053 | 14.396 | 15.210526 | 15.990909 | 16.771292 | 0.002220 | 3.416250e-06 | 1.951053 | 2.876 | 3.690526 | 4.470909 | 5.251292 |
| 2014-07-24 | 189.726090 | 3.964469 | 11.84 | 13.692105 | 14.578 | 15.310526 | 16.072727 | 16.834928 | 0.000050 | 1.032740e-07 | 1.852105 | 2.738 | 3.470526 | 4.232727 | 4.994928 |
| 2014-07-25 | 188.837875 | 5.437378 | 12.69 | 14.005263 | 14.806 | 15.476316 | 16.211364 | 16.946411 | -0.004682 | 2.553529e-05 | 1.315263 | 2.116 | 2.786316 | 3.521364 | 4.256411 |
| 2014-07-28 | 188.914283 | 6.551183 | 12.56 | 13.897368 | 14.726 | 15.368421 | 16.136364 | 16.904306 | 0.000405 | 1.085231e-09 | 1.337368 | 2.166 | 2.808421 | 3.576364 | 4.344306 |
| 2014-07-29 | 188.102462 | 6.450938 | 13.28 | 14.002632 | 14.700 | 15.331579 | 16.118182 | 16.904785 | -0.004297 | 2.179938e-05 | 0.722632 | 1.420 | 2.051579 | 2.838182 | 3.624785 |
Now we will look at the PACF plot of the premimums:
plot_pacf(train['VIXCM30_Premium'],lags=50)
plt.show()
# train autoregression
ar_model = AR(train['VIXCM30_Premium'])
Now we will check what order model is best from AIC and BIC scores:
aic = ar_model.select_order(maxlag=30,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=30,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 30 model order (BIC): 8
Above we can see that the AIC score suggests 30 lags is the best, but this is the max lag we allowed it to use, so we will increase the max to see if it tells us something different.
aic = ar_model.select_order(maxlag=50,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=50,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 49 model order (BIC): 8
We will try order =8 since it is the simplest model:
model_fit = ar_model.fit(maxlag=8)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 8 Coefficients: const 0.040676 L1.VIXCM30_Premium 0.739705 L2.VIXCM30_Premium 0.066149 L3.VIXCM30_Premium 0.052504 L4.VIXCM30_Premium -0.003647 L5.VIXCM30_Premium 0.110634 L6.VIXCM30_Premium -0.049825 L7.VIXCM30_Premium -0.042842 L8.VIXCM30_Premium 0.089621 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.191782092547236
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=100)
Ljung-Box Test lag: 10 | p_value: 0.0275 | still has information lag: 11 | p_value: 0.0354 | still has information lag: 12 | p_value: 0.0318 | still has information lag: 13 | p_value: 0.0450 | still has information lag: 14 | p_value: 0.0258 | still has information lag: 15 | p_value: 0.0349 | still has information lag: 16 | p_value: 0.0264 | still has information lag: 17 | p_value: 0.0330 | still has information lag: 18 | p_value: 0.0098 | still has information lag: 19 | p_value: 0.0072 | still has information lag: 20 | p_value: 0.0106 | still has information lag: 21 | p_value: 0.0058 | still has information lag: 22 | p_value: 0.0078 | still has information lag: 23 | p_value: 0.0072 | still has information lag: 24 | p_value: 0.0072 | still has information lag: 25 | p_value: 0.0090 | still has information lag: 26 | p_value: 0.0087 | still has information lag: 27 | p_value: 0.0035 | still has information lag: 28 | p_value: 0.0049 | still has information lag: 29 | p_value: 0.0008 | still has information lag: 30 | p_value: 0.0005 | still has information lag: 31 | p_value: 0.0004 | still has information lag: 32 | p_value: 0.0005 | still has information lag: 33 | p_value: 0.0001 | still has information lag: 34 | p_value: 0.0000 | still has information lag: 35 | p_value: 0.0000 | still has information lag: 36 | p_value: 0.0000 | still has information lag: 37 | p_value: 0.0000 | still has information lag: 38 | p_value: 0.0000 | still has information lag: 39 | p_value: 0.0000 | still has information lag: 40 | p_value: 0.0000 | still has information lag: 41 | p_value: 0.0000 | still has information lag: 42 | p_value: 0.0000 | still has information lag: 43 | p_value: 0.0000 | still has information lag: 44 | p_value: 0.0000 | still has information lag: 45 | p_value: 0.0000 | still has information lag: 46 | p_value: 0.0000 | still has information lag: 47 | p_value: 0.0000 | still has information lag: 48 | p_value: 0.0000 | still has information lag: 49 | p_value: 0.0000 | still has information lag: 50 | p_value: 0.0000 | still has information lag: 51 | p_value: 0.0000 | still has information lag: 52 | p_value: 0.0000 | still has information lag: 53 | p_value: 0.0000 | still has information lag: 54 | p_value: 0.0000 | still has information lag: 55 | p_value: 0.0000 | still has information lag: 56 | p_value: 0.0000 | still has information lag: 57 | p_value: 0.0000 | still has information lag: 58 | p_value: 0.0000 | still has information lag: 59 | p_value: 0.0000 | still has information lag: 60 | p_value: 0.0000 | still has information lag: 61 | p_value: 0.0000 | still has information lag: 62 | p_value: 0.0000 | still has information lag: 63 | p_value: 0.0000 | still has information lag: 64 | p_value: 0.0000 | still has information lag: 65 | p_value: 0.0000 | still has information lag: 66 | p_value: 0.0000 | still has information lag: 67 | p_value: 0.0000 | still has information lag: 68 | p_value: 0.0000 | still has information lag: 69 | p_value: 0.0000 | still has information lag: 70 | p_value: 0.0000 | still has information lag: 71 | p_value: 0.0000 | still has information lag: 72 | p_value: 0.0000 | still has information lag: 73 | p_value: 0.0000 | still has information lag: 74 | p_value: 0.0000 | still has information lag: 75 | p_value: 0.0000 | still has information lag: 76 | p_value: 0.0000 | still has information lag: 77 | p_value: 0.0000 | still has information lag: 78 | p_value: 0.0000 | still has information lag: 79 | p_value: 0.0000 | still has information lag: 80 | p_value: 0.0000 | still has information lag: 81 | p_value: 0.0000 | still has information lag: 82 | p_value: 0.0000 | still has information lag: 83 | p_value: 0.0000 | still has information lag: 84 | p_value: 0.0000 | still has information lag: 85 | p_value: 0.0000 | still has information lag: 86 | p_value: 0.0000 | still has information lag: 87 | p_value: 0.0000 | still has information lag: 88 | p_value: 0.0000 | still has information lag: 89 | p_value: 0.0000 | still has information lag: 90 | p_value: 0.0000 | still has information lag: 91 | p_value: 0.0000 | still has information lag: 92 | p_value: 0.0000 | still has information lag: 93 | p_value: 0.0000 | still has information lag: 94 | p_value: 0.0000 | still has information lag: 95 | p_value: 0.0000 | still has information lag: 96 | p_value: 0.0000 | still has information lag: 97 | p_value: 0.0000 | still has information lag: 98 | p_value: 0.0000 | still has information lag: 99 | p_value: 0.0000 | still has information lag: 100 | p_value: 0.0000 | still has information
Now we will try order =49 to see if we can caputure this missing information:
model_fit = ar_model.fit(maxlag=49)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 49 Coefficients: const 0.047729 L1.VIXCM30_Premium 0.734760 L2.VIXCM30_Premium 0.073003 L3.VIXCM30_Premium 0.054725 L4.VIXCM30_Premium 0.006517 L5.VIXCM30_Premium 0.089636 L6.VIXCM30_Premium -0.042873 L7.VIXCM30_Premium -0.024546 L8.VIXCM30_Premium 0.086363 L9.VIXCM30_Premium 0.006226 L10.VIXCM30_Premium 0.077364 L11.VIXCM30_Premium -0.058672 L12.VIXCM30_Premium -0.046534 L13.VIXCM30_Premium 0.010955 L14.VIXCM30_Premium 0.045101 L15.VIXCM30_Premium -0.023944 L16.VIXCM30_Premium 0.019762 L17.VIXCM30_Premium -0.010226 L18.VIXCM30_Premium -0.065961 L19.VIXCM30_Premium 0.061991 L20.VIXCM30_Premium -0.034029 L21.VIXCM30_Premium -0.036764 L22.VIXCM30_Premium 0.041656 L23.VIXCM30_Premium -0.023595 L24.VIXCM30_Premium -0.014847 L25.VIXCM30_Premium 0.038173 L26.VIXCM30_Premium -0.023949 L27.VIXCM30_Premium 0.057672 L28.VIXCM30_Premium -0.028205 L29.VIXCM30_Premium -0.055332 L30.VIXCM30_Premium 0.073213 L31.VIXCM30_Premium 0.022460 L32.VIXCM30_Premium -0.031003 L33.VIXCM30_Premium -0.053781 L34.VIXCM30_Premium -0.021064 L35.VIXCM30_Premium 0.033376 L36.VIXCM30_Premium 0.013843 L37.VIXCM30_Premium 0.027811 L38.VIXCM30_Premium 0.023575 L39.VIXCM30_Premium -0.042689 L40.VIXCM30_Premium -0.013639 L41.VIXCM30_Premium -0.025635 L42.VIXCM30_Premium 0.014546 L43.VIXCM30_Premium -0.014341 L44.VIXCM30_Premium -0.006181 L45.VIXCM30_Premium 0.014273 L46.VIXCM30_Premium -0.003107 L47.VIXCM30_Premium 0.028452 L48.VIXCM30_Premium -0.033419 L49.VIXCM30_Premium 0.034756 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.1719350672913336
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=100)
Ljung-Box Test
plot_pacf(model_fit.resid, lags=40) # PACF looks good
model_fit.resid.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c1ce3e710>
run_ad_fuller(model_fit.resid)
ADF Statistic: -47.950249 p-value: 0.000000 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
This model seems to caputure all the data so we will move forward.
Let's check to see how our model is performing on our test data:
H = train['VIXCM30_Premium'].tolist()
AR_ORDER = 49
AR_prem = []
AR_PRED=[]
VIX = []
ACTUAL_FUTURE=[]
ACTUAL_prem = []
n = 0
for d,row in train.tail(1).iterrows():
VIX +=[row['VIX']]
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 5)[0]
AR_prem += [y_hat]
AR_PRED+=[y_hat+VIX[-1]]
VIX+=[row['VIX']]
H += [row['VIXCM30_Premium']]
ACTUAL_prem += [row['VIXCM30_Premium']]
ACTUAL_FUTURE+=[row['VIXCM30']]
n += 1
# plotting the first 200 points
plt.plot(AR_PRED[:200], label='predicted')
plt.plot(ACTUAL_FUTURE[:200], color='b', label='actual')
plt.legend()
<matplotlib.legend.Legend at 0x1c2e7a2da0>
plot_confusion_matrix(ACTUAL_FUTURE, AR_PRED)
Confusion Matrix Raw [[176 132] [160 120]] Normalized [[0.29931973 0.2244898 ] [0.27210884 0.20408163]]
{'cm': array([[176, 132],
[160, 120]]), 'cm_norm': array([[0.29931973, 0.2244898 ],
[0.27210884, 0.20408163]])}
ar_error2 = measure_error(ACTUAL_FUTURE, AR_PRED, label='AR(49)')
print(ar_error2)
RMSE = 0.8488649610470695
RMSE_pcent = 0.048114657402919224
label = AR(49)
{'RMSE': 0.8488649610470695, 'RMSE_pcent': 0.048114657402919224, 'label': 'AR(49)'}
make_money(ACTUAL_FUTURE, AR_PRED)
Total profit or loss: 5.9271 Number of correct days: 298 total incorrect days: 290
First we will look at the PACF plot of the premimums:
plot_pacf(train['VIXCM60_Premium'],lags=50)
plt.show()
# train autoregression
ar_model = AR(train['VIXCM60_Premium'])
Now we will check what order model is best from AIC and BIC scores:
aic = ar_model.select_order(maxlag=30,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=30,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 30 model order (BIC): 5
Above we can see that the AIC score suggests 30 lags is the best, but this is the max lag we allowed it to use, so we will increase the max to see if it tells us something different.
aic = ar_model.select_order(maxlag=50,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=50,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 35 model order (BIC): 5
We will try order =5 since it is the simplest model:
model_fit = ar_model.fit(maxlag=5)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 5 Coefficients: const 0.053017 L1.VIXCM60_Premium 0.769673 L2.VIXCM60_Premium 0.070797 L3.VIXCM60_Premium 0.064153 L4.VIXCM60_Premium -0.024588 L5.VIXCM60_Premium 0.089369 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.3704552438612214
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=40)
Ljung-Box Test lag: 10 | p_value: 0.0001 | still has information lag: 11 | p_value: 0.0002 | still has information lag: 12 | p_value: 0.0002 | still has information lag: 13 | p_value: 0.0004 | still has information lag: 14 | p_value: 0.0002 | still has information lag: 15 | p_value: 0.0004 | still has information lag: 16 | p_value: 0.0002 | still has information lag: 17 | p_value: 0.0003 | still has information lag: 18 | p_value: 0.0001 | still has information lag: 19 | p_value: 0.0001 | still has information lag: 20 | p_value: 0.0002 | still has information lag: 21 | p_value: 0.0000 | still has information lag: 22 | p_value: 0.0001 | still has information lag: 23 | p_value: 0.0001 | still has information lag: 24 | p_value: 0.0001 | still has information lag: 25 | p_value: 0.0002 | still has information lag: 26 | p_value: 0.0002 | still has information lag: 27 | p_value: 0.0001 | still has information lag: 28 | p_value: 0.0001 | still has information lag: 29 | p_value: 0.0000 | still has information lag: 30 | p_value: 0.0000 | still has information lag: 31 | p_value: 0.0000 | still has information lag: 32 | p_value: 0.0000 | still has information lag: 33 | p_value: 0.0000 | still has information lag: 34 | p_value: 0.0000 | still has information lag: 35 | p_value: 0.0000 | still has information lag: 36 | p_value: 0.0000 | still has information lag: 37 | p_value: 0.0000 | still has information lag: 38 | p_value: 0.0000 | still has information lag: 39 | p_value: 0.0000 | still has information lag: 40 | p_value: 0.0000 | still has information
Now we will try order = 35 to see if we can caputure this missing information:
model_fit = ar_model.fit(maxlag=35)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 35 Coefficients: const 0.058063 L1.VIXCM60_Premium 0.770680 L2.VIXCM60_Premium 0.075459 L3.VIXCM60_Premium 0.058299 L4.VIXCM60_Premium -0.020664 L5.VIXCM60_Premium 0.078253 L6.VIXCM60_Premium -0.016905 L7.VIXCM60_Premium -0.040313 L8.VIXCM60_Premium 0.073473 L9.VIXCM60_Premium 0.001452 L10.VIXCM60_Premium 0.092616 L11.VIXCM60_Premium -0.074851 L12.VIXCM60_Premium -0.030249 L13.VIXCM60_Premium -0.005710 L14.VIXCM60_Premium 0.041038 L15.VIXCM60_Premium -0.015750 L16.VIXCM60_Premium 0.033834 L17.VIXCM60_Premium -0.022752 L18.VIXCM60_Premium -0.054545 L19.VIXCM60_Premium 0.051477 L20.VIXCM60_Premium -0.023131 L21.VIXCM60_Premium -0.048491 L22.VIXCM60_Premium 0.049677 L23.VIXCM60_Premium -0.005402 L24.VIXCM60_Premium -0.010091 L25.VIXCM60_Premium 0.022448 L26.VIXCM60_Premium -0.028361 L27.VIXCM60_Premium 0.051241 L28.VIXCM60_Premium -0.018284 L29.VIXCM60_Premium -0.052608 L30.VIXCM60_Premium 0.063715 L31.VIXCM60_Premium 0.017999 L32.VIXCM60_Premium -0.035968 L33.VIXCM60_Premium -0.036698 L34.VIXCM60_Premium -0.025873 L35.VIXCM60_Premium 0.051801 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.350898393202161
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=100)
Ljung-Box Test
plot_pacf(model_fit.resid, lags=40) # PACF looks good
model_fit.resid.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c1def65c0>
run_ad_fuller(model_fit.resid)
ADF Statistic: -48.165053 p-value: 0.000000 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
This model seems to caputure all the data so we will move forward.
Let's check to see how our model is performing on our test data:
H = train['VIXCM60_Premium'].tolist()
AR_ORDER = 35
AR_prem1 = []
AR_PRED1=[]
VIX = []
ACTUAL_FUTURE1=[]
n = 0
for d,row in train.tail(1).iterrows():
VIX +=[row['VIX']]
ACTUAL_prem1 = []
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 5)[0]
AR_prem1 += [y_hat]
AR_PRED1+=[y_hat+VIX[-1]]
VIX+=[row['VIX']]
H += [row['VIXCM60_Premium']]
ACTUAL_prem1 += [row['VIXCM60_Premium']]
ACTUAL_FUTURE1+=[row['VIXCM60']]
n += 1
# plotting the first 200 points
plt.plot(AR_PRED1[:200], label='predicted')
plt.plot(ACTUAL_FUTURE1[:200], color='b', label='actual')
plt.legend()
<matplotlib.legend.Legend at 0x1c1eec9a90>
plot_confusion_matrix(ACTUAL_FUTURE1, AR_PRED1)
Confusion Matrix Raw [[169 132] [165 122]] Normalized [[0.28741497 0.2244898 ] [0.28061224 0.20748299]]
{'cm': array([[169, 132],
[165, 122]]), 'cm_norm': array([[0.28741497, 0.2244898 ],
[0.28061224, 0.20748299]])}
ar_error2 = measure_error(ACTUAL_FUTURE1, AR_PRED1, label='AR(35)')
print(ar_error2)
RMSE = 0.6675266219320293
RMSE_pcent = 0.036184144108954146
label = AR(35)
{'RMSE': 0.6675266219320293, 'RMSE_pcent': 0.036184144108954146, 'label': 'AR(35)'}
make_money(ACTUAL_FUTURE1,AR_PRED1)
Total profit or loss: 0.9495 Number of correct days: 291 total incorrect days: 297
Now we will look at the PACF plot of the premimums:
plot_pacf(train['VIXCM90_Premium'],lags=50)
plt.show()
# train autoregression
ar_model = AR(train['VIXCM90_Premium'])
Now we will check what order model is best from AIC and BIC scores:
aic = ar_model.select_order(maxlag=30,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=30,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 30 model order (BIC): 5
Above we can see that the AIC score suggests 30 lags is the best, but this is the max lag we allowed it to use, so we will increase the max to see if it tells us something different.
aic = ar_model.select_order(maxlag=50,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=50,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 35 model order (BIC): 5
We will try order = 5 since it is the simplest model:
model_fit = ar_model.fit(maxlag=5)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 5 Coefficients: const 0.055619 L1.VIXCM90_Premium 0.769776 L2.VIXCM90_Premium 0.086111 L3.VIXCM90_Premium 0.058301 L4.VIXCM90_Premium -0.030630 L5.VIXCM90_Premium 0.090508 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.4572788232492344
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=40)
Ljung-Box Test lag: 10 | p_value: 0.0001 | still has information lag: 11 | p_value: 0.0001 | still has information lag: 12 | p_value: 0.0001 | still has information lag: 13 | p_value: 0.0002 | still has information lag: 14 | p_value: 0.0002 | still has information lag: 15 | p_value: 0.0003 | still has information lag: 16 | p_value: 0.0001 | still has information lag: 17 | p_value: 0.0001 | still has information lag: 18 | p_value: 0.0001 | still has information lag: 19 | p_value: 0.0001 | still has information lag: 20 | p_value: 0.0001 | still has information lag: 21 | p_value: 0.0000 | still has information lag: 22 | p_value: 0.0001 | still has information lag: 23 | p_value: 0.0001 | still has information lag: 24 | p_value: 0.0001 | still has information lag: 25 | p_value: 0.0001 | still has information lag: 26 | p_value: 0.0002 | still has information lag: 27 | p_value: 0.0001 | still has information lag: 28 | p_value: 0.0001 | still has information lag: 29 | p_value: 0.0000 | still has information lag: 30 | p_value: 0.0000 | still has information lag: 31 | p_value: 0.0000 | still has information lag: 32 | p_value: 0.0000 | still has information lag: 33 | p_value: 0.0000 | still has information lag: 34 | p_value: 0.0000 | still has information lag: 35 | p_value: 0.0000 | still has information lag: 36 | p_value: 0.0000 | still has information lag: 37 | p_value: 0.0000 | still has information lag: 38 | p_value: 0.0000 | still has information lag: 39 | p_value: 0.0000 | still has information lag: 40 | p_value: 0.0000 | still has information
Now we will try order = 35 to see if we can caputure this missing information:
model_fit = ar_model.fit(maxlag=35)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 35 Coefficients: const 0.058823 L1.VIXCM90_Premium 0.768462 L2.VIXCM90_Premium 0.092559 L3.VIXCM90_Premium 0.053360 L4.VIXCM90_Premium -0.026933 L5.VIXCM90_Premium 0.077405 L6.VIXCM90_Premium -0.020206 L7.VIXCM90_Premium -0.037089 L8.VIXCM90_Premium 0.069401 L9.VIXCM90_Premium 0.012646 L10.VIXCM90_Premium 0.089986 L11.VIXCM90_Premium -0.076410 L12.VIXCM90_Premium -0.035494 L13.VIXCM90_Premium -0.006018 L14.VIXCM90_Premium 0.041574 L15.VIXCM90_Premium -0.017509 L16.VIXCM90_Premium 0.042083 L17.VIXCM90_Premium -0.021666 L18.VIXCM90_Premium -0.061964 L19.VIXCM90_Premium 0.046902 L20.VIXCM90_Premium -0.013907 L21.VIXCM90_Premium -0.046341 L22.VIXCM90_Premium 0.040027 L23.VIXCM90_Premium 0.002175 L24.VIXCM90_Premium -0.013742 L25.VIXCM90_Premium 0.025134 L26.VIXCM90_Premium -0.027931 L27.VIXCM90_Premium 0.050044 L28.VIXCM90_Premium -0.010551 L29.VIXCM90_Premium -0.052993 L30.VIXCM90_Premium 0.061218 L31.VIXCM90_Premium 0.016391 L32.VIXCM90_Premium -0.040458 L33.VIXCM90_Premium -0.034215 L34.VIXCM90_Premium -0.024925 L35.VIXCM90_Premium 0.051781 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.4367561404498372
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=100)
Ljung-Box Test
plot_pacf(model_fit.resid, lags=40) # PACF looks good
model_fit.resid.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c2c267f98>
run_ad_fuller(model_fit.resid)
ADF Statistic: -48.148136 p-value: 0.000000 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
This model seems to caputure all the data so we will move forward.
Let's check to see how our model is performing on our test data:
H = train['VIXCM90_Premium'].tolist()
AR_ORDER = 35
AR_prem2 = []
AR_PRED2=[]
VIX = []
ACTUAL_FUTURE2=[]
n = 0
for d,row in train.tail(1).iterrows():
VIX +=[row['VIX']]
ACTUAL_prem2 = []
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 5)[0]
AR_prem2 += [y_hat]
AR_PRED2+=[y_hat+VIX[-1]]
VIX+=[row['VIX']]
H += [row['VIXCM90_Premium']]
ACTUAL_prem2 += [row['VIXCM90_Premium']]
ACTUAL_FUTURE2+=[row['VIXCM90']]
n += 1
# plotting the first 200 points
plt.plot(AR_PRED2[:200], label='predicted')
plt.plot(ACTUAL_FUTURE2[:200], color='b', label='actual')
plt.legend()
<matplotlib.legend.Legend at 0x1c2c302fd0>
plot_confusion_matrix(ACTUAL_FUTURE2, AR_PRED2)
Confusion Matrix Raw [[160 134] [168 126]] Normalized [[0.27210884 0.22789116] [0.28571429 0.21428571]]
{'cm': array([[160, 134],
[168, 126]]), 'cm_norm': array([[0.27210884, 0.22789116],
[0.28571429, 0.21428571]])}
ar_error2 = measure_error(ACTUAL_FUTURE2, AR_PRED2, label='AR(35)')
print(ar_error2)
RMSE = 0.6043100435289301
RMSE_pcent = 0.031809285219358684
label = AR(35)
{'RMSE': 0.6043100435289301, 'RMSE_pcent': 0.031809285219358684, 'label': 'AR(35)'}
make_money(ACTUAL_FUTURE2, AR_PRED2)
Total profit or loss: 2.9856 Number of correct days: 286 total incorrect days: 302
First we will look at the PACF plot of the premimums:
plot_pacf(train['VIXCM120_Premium'],lags=50)
plt.show()
# train autoregression
ar_model = AR(train['VIXCM120_Premium'])
Now we will check what order model is best from AIC and BIC scores:
aic = ar_model.select_order(maxlag=30,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=30,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 30 model order (BIC): 5
Above we can see that the AIC score suggests 30 lags is the best, but this is the max lag we allowed it to use, so we will increase the max to see if it tells us something different.
aic = ar_model.select_order(maxlag=50,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=50,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 35 model order (BIC): 5
We will try order = 5 since it is the simplest model:
model_fit = ar_model.fit(maxlag=5)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 5 Coefficients: const 0.058267 L1.VIXCM120_Premium 0.766850 L2.VIXCM120_Premium 0.089744 L3.VIXCM120_Premium 0.064586 L4.VIXCM120_Premium -0.040856 L5.VIXCM120_Premium 0.096360 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.5162838219095407
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=100)
Ljung-Box Test lag: 10 | p_value: 0.0000 | still has information lag: 11 | p_value: 0.0000 | still has information lag: 12 | p_value: 0.0000 | still has information lag: 13 | p_value: 0.0001 | still has information lag: 14 | p_value: 0.0001 | still has information lag: 15 | p_value: 0.0001 | still has information lag: 16 | p_value: 0.0000 | still has information lag: 17 | p_value: 0.0000 | still has information lag: 18 | p_value: 0.0000 | still has information lag: 19 | p_value: 0.0000 | still has information lag: 20 | p_value: 0.0000 | still has information lag: 21 | p_value: 0.0000 | still has information lag: 22 | p_value: 0.0000 | still has information lag: 23 | p_value: 0.0000 | still has information lag: 24 | p_value: 0.0000 | still has information lag: 25 | p_value: 0.0000 | still has information lag: 26 | p_value: 0.0000 | still has information lag: 27 | p_value: 0.0000 | still has information lag: 28 | p_value: 0.0000 | still has information lag: 29 | p_value: 0.0000 | still has information lag: 30 | p_value: 0.0000 | still has information lag: 31 | p_value: 0.0000 | still has information lag: 32 | p_value: 0.0000 | still has information lag: 33 | p_value: 0.0000 | still has information lag: 34 | p_value: 0.0000 | still has information lag: 35 | p_value: 0.0000 | still has information lag: 36 | p_value: 0.0000 | still has information lag: 37 | p_value: 0.0000 | still has information lag: 38 | p_value: 0.0000 | still has information lag: 39 | p_value: 0.0000 | still has information lag: 40 | p_value: 0.0000 | still has information lag: 41 | p_value: 0.0000 | still has information lag: 42 | p_value: 0.0000 | still has information lag: 43 | p_value: 0.0000 | still has information lag: 44 | p_value: 0.0000 | still has information lag: 45 | p_value: 0.0000 | still has information lag: 46 | p_value: 0.0000 | still has information lag: 47 | p_value: 0.0000 | still has information lag: 48 | p_value: 0.0000 | still has information lag: 49 | p_value: 0.0000 | still has information lag: 50 | p_value: 0.0000 | still has information lag: 51 | p_value: 0.0000 | still has information lag: 52 | p_value: 0.0000 | still has information lag: 53 | p_value: 0.0000 | still has information lag: 54 | p_value: 0.0000 | still has information lag: 55 | p_value: 0.0000 | still has information lag: 56 | p_value: 0.0000 | still has information lag: 57 | p_value: 0.0000 | still has information lag: 58 | p_value: 0.0000 | still has information lag: 59 | p_value: 0.0000 | still has information lag: 60 | p_value: 0.0000 | still has information lag: 61 | p_value: 0.0000 | still has information lag: 62 | p_value: 0.0000 | still has information lag: 63 | p_value: 0.0000 | still has information lag: 64 | p_value: 0.0000 | still has information lag: 65 | p_value: 0.0000 | still has information lag: 66 | p_value: 0.0000 | still has information lag: 67 | p_value: 0.0000 | still has information lag: 68 | p_value: 0.0000 | still has information lag: 69 | p_value: 0.0000 | still has information lag: 70 | p_value: 0.0000 | still has information lag: 71 | p_value: 0.0000 | still has information lag: 72 | p_value: 0.0000 | still has information lag: 73 | p_value: 0.0000 | still has information lag: 74 | p_value: 0.0000 | still has information lag: 75 | p_value: 0.0000 | still has information lag: 76 | p_value: 0.0000 | still has information lag: 77 | p_value: 0.0000 | still has information lag: 78 | p_value: 0.0000 | still has information lag: 79 | p_value: 0.0000 | still has information lag: 80 | p_value: 0.0000 | still has information lag: 81 | p_value: 0.0000 | still has information lag: 82 | p_value: 0.0000 | still has information lag: 83 | p_value: 0.0000 | still has information lag: 84 | p_value: 0.0000 | still has information lag: 85 | p_value: 0.0000 | still has information lag: 86 | p_value: 0.0000 | still has information lag: 87 | p_value: 0.0000 | still has information lag: 88 | p_value: 0.0000 | still has information lag: 89 | p_value: 0.0000 | still has information lag: 90 | p_value: 0.0000 | still has information lag: 91 | p_value: 0.0000 | still has information lag: 92 | p_value: 0.0000 | still has information lag: 93 | p_value: 0.0000 | still has information lag: 94 | p_value: 0.0000 | still has information lag: 95 | p_value: 0.0000 | still has information lag: 96 | p_value: 0.0000 | still has information lag: 97 | p_value: 0.0000 | still has information lag: 98 | p_value: 0.0000 | still has information lag: 99 | p_value: 0.0000 | still has information lag: 100 | p_value: 0.0000 | still has information
Now we will try order = 35 to see if we can caputure this missing information:
model_fit = ar_model.fit(maxlag=35)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 35 Coefficients: const 0.062413 L1.VIXCM120_Premium 0.768963 L2.VIXCM120_Premium 0.095499 L3.VIXCM120_Premium 0.056335 L4.VIXCM120_Premium -0.033309 L5.VIXCM120_Premium 0.083427 L6.VIXCM120_Premium -0.015385 L7.VIXCM120_Premium -0.042856 L8.VIXCM120_Premium 0.070788 L9.VIXCM120_Premium 0.005830 L10.VIXCM120_Premium 0.099565 L11.VIXCM120_Premium -0.087578 L12.VIXCM120_Premium -0.040525 L13.VIXCM120_Premium 0.003309 L14.VIXCM120_Premium 0.032214 L15.VIXCM120_Premium -0.019641 L16.VIXCM120_Premium 0.051833 L17.VIXCM120_Premium -0.025325 L18.VIXCM120_Premium -0.063755 L19.VIXCM120_Premium 0.058128 L20.VIXCM120_Premium -0.012063 L21.VIXCM120_Premium -0.054492 L22.VIXCM120_Premium 0.052034 L23.VIXCM120_Premium -0.002868 L24.VIXCM120_Premium -0.017726 L25.VIXCM120_Premium 0.027768 L26.VIXCM120_Premium -0.022699 L27.VIXCM120_Premium 0.039281 L28.VIXCM120_Premium -0.000798 L29.VIXCM120_Premium -0.058814 L30.VIXCM120_Premium 0.061087 L31.VIXCM120_Premium 0.009791 L32.VIXCM120_Premium -0.033172 L33.VIXCM120_Premium -0.034713 L34.VIXCM120_Premium -0.024950 L35.VIXCM120_Premium 0.049975 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.4936875650982426
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=100)
Ljung-Box Test
plot_pacf(model_fit.resid, lags=40) # PACF looks good
model_fit.resid.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c2e3a7470>
run_ad_fuller(model_fit.resid)
ADF Statistic: -48.151979 p-value: 0.000000 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
This model seems to caputure all the data so we will move forward.
Let's check to see how our model is performing on our test data:
H = train['VIXCM120_Premium'].tolist()
AR_ORDER = 35
AR_prem3 = []
AR_PRED3=[]
VIX = []
ACTUAL_FUTURE3=[]
n = 0
for d,row in train.tail(1).iterrows():
VIX +=[row['VIX']]
ACTUAL_prem3 = []
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 5)[0]
AR_prem3 += [y_hat]
AR_PRED3+=[y_hat+VIX[-1]]
VIX+=[row['VIX']]
H += [row['VIXCM120_Premium']]
ACTUAL_prem3 += [row['VIXCM120_Premium']]
ACTUAL_FUTURE3+=[row['VIXCM120']]
n += 1
# plotting the first 200 points
plt.plot(AR_PRED3[:200], label='predicted')
plt.plot(ACTUAL_FUTURE3[:200], color='b', label='actual')
plt.legend()
<matplotlib.legend.Legend at 0x1c1d24e588>
plot_confusion_matrix(ACTUAL_FUTURE3, AR_PRED3)
Confusion Matrix Raw [[154 132] [173 129]] Normalized [[0.26190476 0.2244898 ] [0.29421769 0.21938776]]
{'cm': array([[154, 132],
[173, 129]]), 'cm_norm': array([[0.26190476, 0.2244898 ],
[0.29421769, 0.21938776]])}
ar_error2 = measure_error(ACTUAL_FUTURE3, AR_PRED3, label='AR(35)')
print(ar_error2)
RMSE = 0.5713382942295682
RMSE_pcent = 0.029363389373415195
label = AR(35)
{'RMSE': 0.5713382942295682, 'RMSE_pcent': 0.029363389373415195, 'label': 'AR(35)'}
make_money(ACTUAL_FUTURE3, AR_PRED3)
Total profit or loss: 4.7723 Number of correct days: 283 total incorrect days: 305
First we will look at the PACF plot of the premimums:
plot_pacf(train['VIXCM150_Premium'],lags=50)
plt.show()
# train autoregression
ar_model = AR(train['VIXCM150_Premium'])
Now we will check what order model is best from AIC and BIC scores:
aic = ar_model.select_order(maxlag=30,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=30,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 30 model order (BIC): 5
Above we can see that the AIC score suggests 30 lags is the best, but this is the max lag we allowed it to use, so we will increase the max to see if it tells us something different.
aic = ar_model.select_order(maxlag=50,ic='aic',trend='nc')
bic = ar_model.select_order(maxlag=50,ic='bic',trend='nc')
print('model order (AIC): %d' % aic)
print('model order (BIC): %d' % bic)
model order (AIC): 49 model order (BIC): 11
We will try order = 11 since it is the simplest model:
model_fit = ar_model.fit(maxlag=11)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 11 Coefficients: const 0.061096 L1.VIXCM150_Premium 0.767740 L2.VIXCM150_Premium 0.091239 L3.VIXCM150_Premium 0.072728 L4.VIXCM150_Premium -0.054116 L5.VIXCM150_Premium 0.098360 L6.VIXCM150_Premium -0.000875 L7.VIXCM150_Premium -0.067692 L8.VIXCM150_Premium 0.080361 L9.VIXCM150_Premium -0.000225 L10.VIXCM150_Premium 0.099617 L11.VIXCM150_Premium -0.108832 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.5721360055497136
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=100)
Ljung-Box Test lag: 30 | p_value: 0.0346 | still has information lag: 31 | p_value: 0.0362 | still has information lag: 32 | p_value: 0.0466 | still has information lag: 34 | p_value: 0.0090 | still has information lag: 35 | p_value: 0.0120 | still has information lag: 36 | p_value: 0.0143 | still has information lag: 37 | p_value: 0.0186 | still has information lag: 38 | p_value: 0.0093 | still has information lag: 39 | p_value: 0.0101 | still has information lag: 40 | p_value: 0.0130 | still has information lag: 41 | p_value: 0.0155 | still has information lag: 42 | p_value: 0.0184 | still has information lag: 43 | p_value: 0.0092 | still has information lag: 44 | p_value: 0.0118 | still has information lag: 45 | p_value: 0.0120 | still has information lag: 46 | p_value: 0.0142 | still has information lag: 47 | p_value: 0.0132 | still has information lag: 48 | p_value: 0.0010 | still has information lag: 49 | p_value: 0.0014 | still has information lag: 50 | p_value: 0.0001 | still has information lag: 51 | p_value: 0.0001 | still has information lag: 52 | p_value: 0.0001 | still has information lag: 53 | p_value: 0.0001 | still has information lag: 54 | p_value: 0.0001 | still has information lag: 55 | p_value: 0.0001 | still has information lag: 56 | p_value: 0.0000 | still has information lag: 57 | p_value: 0.0000 | still has information lag: 58 | p_value: 0.0000 | still has information lag: 59 | p_value: 0.0000 | still has information lag: 60 | p_value: 0.0000 | still has information lag: 61 | p_value: 0.0000 | still has information lag: 62 | p_value: 0.0000 | still has information lag: 63 | p_value: 0.0000 | still has information lag: 64 | p_value: 0.0000 | still has information lag: 65 | p_value: 0.0000 | still has information lag: 66 | p_value: 0.0000 | still has information lag: 67 | p_value: 0.0000 | still has information lag: 68 | p_value: 0.0000 | still has information lag: 69 | p_value: 0.0000 | still has information lag: 70 | p_value: 0.0000 | still has information lag: 71 | p_value: 0.0000 | still has information lag: 72 | p_value: 0.0000 | still has information lag: 73 | p_value: 0.0000 | still has information lag: 74 | p_value: 0.0000 | still has information lag: 75 | p_value: 0.0000 | still has information lag: 76 | p_value: 0.0000 | still has information lag: 77 | p_value: 0.0000 | still has information lag: 78 | p_value: 0.0000 | still has information lag: 79 | p_value: 0.0000 | still has information lag: 80 | p_value: 0.0000 | still has information lag: 81 | p_value: 0.0000 | still has information lag: 82 | p_value: 0.0000 | still has information lag: 83 | p_value: 0.0000 | still has information lag: 84 | p_value: 0.0000 | still has information lag: 85 | p_value: 0.0000 | still has information lag: 86 | p_value: 0.0000 | still has information lag: 87 | p_value: 0.0000 | still has information lag: 88 | p_value: 0.0000 | still has information lag: 89 | p_value: 0.0000 | still has information lag: 90 | p_value: 0.0000 | still has information lag: 91 | p_value: 0.0000 | still has information lag: 92 | p_value: 0.0000 | still has information lag: 93 | p_value: 0.0000 | still has information lag: 94 | p_value: 0.0000 | still has information lag: 95 | p_value: 0.0000 | still has information lag: 96 | p_value: 0.0000 | still has information lag: 97 | p_value: 0.0000 | still has information lag: 98 | p_value: 0.0000 | still has information lag: 99 | p_value: 0.0000 | still has information lag: 100 | p_value: 0.0000 | still has information
Now we will try order = 49 to see if we can caputure this missing information:
model_fit = ar_model.fit(maxlag=49)
print('Lag: %s' % model_fit.k_ar)
print('Coefficients: %s' % model_fit.params)
intercept = model_fit.params[0]
coeff = list(model_fit.params)[1:] # drop the intercept
coeff.reverse()
Lag: 49 Coefficients: const 0.061411 L1.VIXCM150_Premium 0.769016 L2.VIXCM150_Premium 0.096952 L3.VIXCM150_Premium 0.062871 L4.VIXCM150_Premium -0.037810 L5.VIXCM150_Premium 0.087700 L6.VIXCM150_Premium -0.010294 L7.VIXCM150_Premium -0.053452 L8.VIXCM150_Premium 0.072443 L9.VIXCM150_Premium 0.000395 L10.VIXCM150_Premium 0.108985 L11.VIXCM150_Premium -0.098503 L12.VIXCM150_Premium -0.045901 L13.VIXCM150_Premium 0.011974 L14.VIXCM150_Premium 0.023194 L15.VIXCM150_Premium -0.017758 L16.VIXCM150_Premium 0.063046 L17.VIXCM150_Premium -0.032031 L18.VIXCM150_Premium -0.062360 L19.VIXCM150_Premium 0.062948 L20.VIXCM150_Premium -0.006435 L21.VIXCM150_Premium -0.060082 L22.VIXCM150_Premium 0.056023 L23.VIXCM150_Premium -0.002980 L24.VIXCM150_Premium -0.024719 L25.VIXCM150_Premium 0.030095 L26.VIXCM150_Premium -0.012988 L27.VIXCM150_Premium 0.026033 L28.VIXCM150_Premium 0.001659 L29.VIXCM150_Premium -0.057286 L30.VIXCM150_Premium 0.056237 L31.VIXCM150_Premium 0.006627 L32.VIXCM150_Premium -0.021767 L33.VIXCM150_Premium -0.036470 L34.VIXCM150_Premium -0.026932 L35.VIXCM150_Premium 0.039700 L36.VIXCM150_Premium 0.006839 L37.VIXCM150_Premium 0.016915 L38.VIXCM150_Premium 0.027289 L39.VIXCM150_Premium -0.011537 L40.VIXCM150_Premium -0.014068 L41.VIXCM150_Premium -0.034881 L42.VIXCM150_Premium 0.004053 L43.VIXCM150_Premium -0.013500 L44.VIXCM150_Premium 0.016294 L45.VIXCM150_Premium -0.006829 L46.VIXCM150_Premium -0.006763 L47.VIXCM150_Premium 0.034234 L48.VIXCM150_Premium -0.063235 L49.VIXCM150_Premium 0.055298 dtype: float64
print('RMSE (in sample):',np.sqrt(np.mean(model_fit.sigma2)))
RMSE (in sample): 1.5569022259180805
We will check to make sure that there is no information left in the residuals and that they are stationary:
run_ljung_box(model_fit.resid, lags=100)
Ljung-Box Test
plot_pacf(model_fit.resid, lags=40) # PACF looks good
model_fit.resid.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x1c2e27ee48>
run_ad_fuller(model_fit.resid)
ADF Statistic: -47.991732 p-value: 0.000000 Critical Values: 1%: -3.433 5%: -2.863 10%: -2.567 We reject the Null Hypothesis (H0) -- the time series is stationary
This model seems to caputure all the data so we will move forward.
Let's check to see how our model is performing on our test data:
H = train['VIXCM150_Premium'].tolist()
AR_ORDER = 49
AR_prem4 = []
AR_PRED4=[]
VIX = []
ACTUAL_FUTURE4=[]
n = 0
for d,row in train.tail(1).iterrows():
VIX +=[row['VIX']]
ACTUAL_prem4 = []
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 5)[0]
AR_prem4 += [y_hat]
AR_PRED4+=[y_hat+VIX[-1]]
VIX+=[row['VIX']]
H += [row['VIXCM150_Premium']]
ACTUAL_prem4 += [row['VIXCM150_Premium']]
ACTUAL_FUTURE4+=[row['VIXCM150']]
n += 1
# plotting the first 200 points
plt.plot(AR_PRED4[:200], label='predicted')
plt.plot(ACTUAL_FUTURE4[:200], color='b', label='actual')
plt.legend()
<matplotlib.legend.Legend at 0x1c2ec1b7f0>
plot_confusion_matrix(ACTUAL_FUTURE4, AR_PRED4)
Confusion Matrix Raw [[153 135] [159 141]] Normalized [[0.26020408 0.22959184] [0.27040816 0.23979592]]
{'cm': array([[153, 135],
[159, 141]]), 'cm_norm': array([[0.26020408, 0.22959184],
[0.27040816, 0.23979592]])}
ar_error2 = measure_error(ACTUAL_FUTURE4, AR_PRED4, label='AR(49)')
print(ar_error2)
RMSE = 0.5579351162667225
RMSE_pcent = 0.02801621318220747
label = AR(49)
{'RMSE': 0.5579351162667225, 'RMSE_pcent': 0.02801621318220747, 'label': 'AR(49)'}
make_money(ACTUAL_FUTURE4, AR_PRED4)
Total profit or loss: 5.1826 Number of correct days: 294 total incorrect days: 294
| data | model | RMSE_Percent | PnL |
|---|---|---|---|
| VIXCM30_Premium | AR(49) | 0.04811 | +\$ 5.9271 |
| VIXCM60_Premium | AR(35) | 0.03618 | +\$ 0.9495 |
| VIXCM90_Premium | AR(35) | 0.03180 | +\$ 2.9856 |
| VIXCM120_Premium | AR(35) | 0.02930 | +\$ 4.7723 |
| VIXCM150_Premium | AR(49) | 0.02801 | +\$ 5.1826 |
Note - we did try to add rolling forecasts, limiting the look back to 180, 90, and 60 day, but this actually made the model perform worse, so we decided to keep all historical data.
*Note we will conclude that all variables used in the following model are stationary as we already checked above.
First let's take a look at the PACF and ACF:
X= train['VIXCM30_Premium']
auto = acf(X)
print(auto)
y = plot_acf(X, lags=60)
plt.show()
y = plot_pacf(X, lags=40)
plt.show()
[1. 0.93263131 0.89195702 0.86420381 0.84062773 0.82582195 0.80192867 0.7817517 0.77243611 0.75746196 0.74231963 0.71630187 0.68909109 0.66980097 0.65316705 0.63086905 0.60978293 0.58550488 0.55986135 0.54301903 0.519158 0.49584697 0.47905112 0.45706396 0.43962203 0.42561678 0.40758918 0.39513983 0.37418473 0.35365751 0.34368655 0.32654458 0.30575451 0.28431679 0.26717804 0.26069954 0.25273016 0.24239629 0.23019658 0.21311427 0.1994161 ]
We will runa grid search to help us decide what p and q values to use in our model:
grid_result = run_grid_over_arma(X, p_lags=list(range(5)), q_lags=range(5))
['p(0)', 'n/a', '10572.12', ' 9608.72', ' 8927.59', ' 8681.60'] ['p(1)', ' 7682.91', ' 7595.44', ' 7570.22', ' 7573.93', ' 7576.89'] ['p(2)', ' 7619.48', ' 7564.96', ' 7572.30', ' 7580.01', 'n/a'] ['p(3)', ' 7591.93', ' 7572.28', ' 7578.77', ' 7586.41', ' 7565.84'] ['p(4)', ' 7588.03', ' 7579.89', ' 7587.41', 'n/a', 'n/a'] +------+----------+----------+----------+----------+----------+ | p\q | q (0) | q (1) | q (2) | q (3) | q (4) | +------+----------+----------+----------+----------+----------+ | p(0) | n/a | 10572.12 | 9608.72 | 8927.59 | 8681.60 | | p(1) | 7682.91 | 7595.44 | 7570.22 | 7573.93 | 7576.89 | | p(2) | 7619.48 | 7564.96 | 7572.30 | 7580.01 | n/a | | p(3) | 7591.93 | 7572.28 | 7578.77 | 7586.41 | 7565.84 | | p(4) | 7588.03 | 7579.89 | 7587.41 | n/a | n/a | +------+----------+----------+----------+----------+----------+ bad values: [(0, 0), (2, 4), (4, 3), (4, 4)] time required 9.743584871292114
b = sorted(list(grid_result.items()), key=lambda k: k[1])
print(b)
[((2, 1), 7564.957472965394), ((3, 4), 7565.8438456627155), ((1, 2), 7570.21967803622), ((3, 1), 7572.279762769047), ((2, 2), 7572.299550456719), ((1, 3), 7573.930732382952), ((1, 4), 7576.894075550767), ((3, 2), 7578.7665201778), ((4, 1), 7579.892549913009), ((2, 3), 7580.011855675479), ((3, 3), 7586.414384936876), ((4, 2), 7587.408023579574), ((4, 0), 7588.029367390866), ((3, 0), 7591.9328531265255), ((1, 1), 7595.437259910496), ((2, 0), 7619.4787994317485), ((1, 0), 7682.908739805367), ((0, 4), 8681.60204393116), ((0, 3), 8927.590315352254), ((0, 2), 9608.724979676586), ((0, 1), 10572.123910239005)]
We will try an ARMA(2,1) model and a ARMA (3,4) model:
First ARMA(2,1):
model = ARIMA(X, order=(2,0,1))
r = model.fit()
r.summary()
print('RMSE (in sample):',np.sqrt(np.mean(r.sigma2)))
RMSE (in sample): 1.1975193171875995
train1=train['VIXCM30_Premium'].dropna().tolist()
arima_model = ARIMA(train1, order=(2,0,1))
model_fit = arima_model.fit()
We can check to see if there is any information left in the residuals:
run_ljung_box(model_fit.resid,lags=40)
Ljung-Box Test lag: 5 | p_value: 0.0245 | still has information lag: 6 | p_value: 0.0342 | still has information lag: 7 | p_value: 0.0024 | still has information lag: 8 | p_value: 0.0006 | still has information lag: 9 | p_value: 0.0005 | still has information lag: 10 | p_value: 0.0000 | still has information lag: 11 | p_value: 0.0000 | still has information lag: 12 | p_value: 0.0000 | still has information lag: 13 | p_value: 0.0000 | still has information lag: 14 | p_value: 0.0000 | still has information lag: 15 | p_value: 0.0000 | still has information lag: 16 | p_value: 0.0000 | still has information lag: 17 | p_value: 0.0000 | still has information lag: 18 | p_value: 0.0000 | still has information lag: 19 | p_value: 0.0000 | still has information lag: 20 | p_value: 0.0000 | still has information lag: 21 | p_value: 0.0000 | still has information lag: 22 | p_value: 0.0000 | still has information lag: 23 | p_value: 0.0000 | still has information lag: 24 | p_value: 0.0000 | still has information lag: 25 | p_value: 0.0000 | still has information lag: 26 | p_value: 0.0000 | still has information lag: 27 | p_value: 0.0000 | still has information lag: 28 | p_value: 0.0000 | still has information lag: 29 | p_value: 0.0000 | still has information lag: 30 | p_value: 0.0000 | still has information lag: 31 | p_value: 0.0000 | still has information lag: 32 | p_value: 0.0000 | still has information lag: 33 | p_value: 0.0000 | still has information lag: 34 | p_value: 0.0000 | still has information lag: 35 | p_value: 0.0000 | still has information lag: 36 | p_value: 0.0000 | still has information lag: 37 | p_value: 0.0000 | still has information lag: 38 | p_value: 0.0000 | still has information lag: 39 | p_value: 0.0000 | still has information lag: 40 | p_value: 0.0000 | still has information
Unfortunately, there is still a lot of data left in the residuals. We will check to see how this model performs out of sample regardless and then we will check to see if a model with a different order might capture more of this residual information.
H = train['VIXCM30_Premium'].tolist()
ARIMA_ORDER = (2,0,1)
ARMA_prem4 = []
ARMA_PRED4=[]
VIX = []
ACTUAL_FUTURE4=[]
ACTUAL_prem4 = []
n = 0
for d,row in train.tail(1).iterrows():
VIX +=[row['VIX']]
for d,row in test.iterrows():
arima_model = ARIMA(H, order=ARIMA_ORDER)
arima_fit = arima_model.fit()
y_hat = arima_fit.predict(start=len(H), end=len(H) + 5)[0]
ARMA_prem4 += [y_hat]
ARMA_PRED4+=[y_hat+VIX[-1]]
VIX+=[row['VIX']]
H += [row['VIXCM30_Premium']]
ACTUAL_prem4 += [row['VIXCM30_Premium']]
ACTUAL_FUTURE4+=[row['VIXCM30']]
n += 1
# plotting the first 200 points
plt.plot(ARMA_PRED4[:200], label='predicted')
plt.plot(ACTUAL_FUTURE4[:200], color='b', label='actual')
plt.legend()
<matplotlib.legend.Legend at 0x1c2f5335c0>
rolling_ar_errors = measure_error(ACTUAL_FUTURE4, ARMA_PRED4, label='Rolling ARIMA(%d,%d,%d)' % ARIMA_ORDER)
RMSE = 0.8369772457435734 RMSE_pcent = 0.047440847815554896 label = Rolling ARIMA(2,0,1)
ar_cm = plot_confusion_matrix(ACTUAL_FUTURE4, ARMA_PRED4)
Confusion Matrix Raw [[193 115] [181 99]] Normalized [[0.32823129 0.19557823] [0.30782313 0.16836735]]
make_money(ACTUAL_FUTURE4, ARMA_PRED4)
Total profit or loss: 1.5630 Number of correct days: 294 total incorrect days: 294
Now let's try our ARMA (3,4) model to see if we can solve the issue of the information that is remaining in the residuals:
model2 = ARIMA(X, order=(3,0,4))
r2 = model2.fit()
r2.summary()
print('RMSE (in sample):',np.sqrt(np.mean(r2.sigma2)))
RMSE (in sample): 1.1897402525654754
arima_model = ARIMA(train1, order=(3,0,4))
model_fit2 = arima_model.fit()
run_ljung_box(r2.resid,lags=40)
Ljung-Box Test lag: 10 | p_value: 0.0000 | still has information lag: 11 | p_value: 0.0001 | still has information lag: 12 | p_value: 0.0001 | still has information lag: 13 | p_value: 0.0001 | still has information lag: 14 | p_value: 0.0001 | still has information lag: 15 | p_value: 0.0000 | still has information lag: 16 | p_value: 0.0001 | still has information lag: 17 | p_value: 0.0001 | still has information lag: 18 | p_value: 0.0001 | still has information lag: 19 | p_value: 0.0002 | still has information lag: 20 | p_value: 0.0003 | still has information lag: 21 | p_value: 0.0002 | still has information lag: 22 | p_value: 0.0003 | still has information lag: 23 | p_value: 0.0005 | still has information lag: 24 | p_value: 0.0004 | still has information lag: 25 | p_value: 0.0005 | still has information lag: 26 | p_value: 0.0008 | still has information lag: 27 | p_value: 0.0005 | still has information lag: 28 | p_value: 0.0007 | still has information lag: 29 | p_value: 0.0004 | still has information lag: 30 | p_value: 0.0004 | still has information lag: 31 | p_value: 0.0002 | still has information lag: 32 | p_value: 0.0003 | still has information lag: 33 | p_value: 0.0000 | still has information lag: 34 | p_value: 0.0000 | still has information lag: 35 | p_value: 0.0000 | still has information lag: 36 | p_value: 0.0000 | still has information lag: 37 | p_value: 0.0000 | still has information lag: 38 | p_value: 0.0000 | still has information lag: 39 | p_value: 0.0000 | still has information lag: 40 | p_value: 0.0000 | still has information
H = train['VIXCM30_Premium'].tolist()
ARIMA_ORDER = (3,0,4)
ARMA_prem = []
ARMA_PRED=[]
VIX = []
ACTUAL_FUTURE=[]
ACTUAL_prem = []
n = 0
for d,row in train.tail(1).iterrows():
VIX +=[row['VIX']]
for d,row in test.iterrows():
arima_model = ARIMA(H, order=ARIMA_ORDER)
arima_fit = arima_model.fit()
y_hat = arima_fit.predict(start=len(H), end=len(H) + 5)[0]
ARMA_prem += [y_hat]
ARMA_PRED+=[y_hat+VIX[-1]]
VIX+=[row['VIX']]
H += [row['VIXCM30_Premium']]
ACTUAL_prem += [row['VIXCM30_Premium']]
ACTUAL_FUTURE+=[row['VIXCM30']]
n += 1
# plotting the first 200 points
plt.plot(ARMA_PRED[:200], label='predicted')
plt.plot(ACTUAL_FUTURE[:200], color='b', label='actual')
plt.legend()
<matplotlib.legend.Legend at 0x1c2e3019b0>
rolling_ar_errors2 = measure_error(ACTUAL_FUTURE, ARMA_PRED, label='Rolling ARIMA(%d,%d,%d)' % ARIMA_ORDER)
RMSE = 0.8361463719379066 RMSE_pcent = 0.04739375291785126 label = Rolling ARIMA(3,0,4)
ar_cm2 = plot_confusion_matrix(ACTUAL_FUTURE, ARMA_PRED)
Confusion Matrix Raw [[185 123] [165 115]] Normalized [[0.31462585 0.20918367] [0.28061224 0.19557823]]
make_money(ACTUAL_FUTURE, ARMA_PRED)
Total profit or loss: 15.6848 Number of correct days: 302 total incorrect days: 286
| data | model | First lag in Ljung Box | RMSE_Percent | PnL |
|---|---|---|---|---|
| VIXCM30_Premium | ARMA(2,1) | 5 | 0.04744 | +\$ 1.56300 |
| VIXCM30_Premium | ARMA(3,4) | 10 | 0.04739 | +\$ 15.6848 |
We can see that this order model also has information left in the lagged residuals (but starting at 10 rather than 5) and it also performs slightly better than the ARMA(2,1) model. This makes us a little cautious, but let's check if this problem persists in the ARMA models for the other Premium variables as well. We will spot check with the VIXCM60 Premium.
First let's take a look at the PACF and ACF:
X2= train['VIXCM60_Premium']
auto = acf(X2)
print(auto)
y = plot_acf(X2, lags=60)
plt.show()
y = plot_pacf(X2, lags=40)
plt.show()
[1. 0.95290316 0.92320006 0.90125888 0.88073219 0.86684725 0.8488799 0.8317255 0.82183554 0.80894865 0.79688858 0.77657838 0.75628295 0.74004291 0.72610523 0.70945886 0.69358428 0.67398217 0.6540956 0.63959913 0.62170176 0.6032417 0.59016855 0.57442543 0.56071146 0.54849787 0.53332192 0.52152519 0.50556092 0.48949031 0.47956356 0.46535086 0.44882695 0.43283906 0.41861135 0.41088356 0.40227172 0.39214396 0.38157287 0.36778869 0.35501799]
We will run a grid search to help us decide what p and q values to use in our model:
grid_result = run_grid_over_arma(X2, p_lags=list(range(5)), q_lags=range(5))
['p(0)', 'n/a', '11908.22', '10764.37', ' 9956.62', ' 9580.24'] ['p(1)', ' 8299.45', ' 8222.75', ' 8208.13', ' 8213.54', ' 8213.48'] ['p(2)', ' 8240.40', ' 8202.76', ' 8210.53', ' 8218.27', ' 8219.33'] ['p(3)', ' 8220.75', ' 8210.53', ' 8217.39', ' 8224.26', ' 8211.52'] ['p(4)', ' 8223.52', ' 8218.26', ' 8223.89', 'n/a', 'n/a'] +------+----------+----------+----------+----------+----------+ | p\q | q (0) | q (1) | q (2) | q (3) | q (4) | +------+----------+----------+----------+----------+----------+ | p(0) | n/a | 11908.22 | 10764.37 | 9956.62 | 9580.24 | | p(1) | 8299.45 | 8222.75 | 8208.13 | 8213.54 | 8213.48 | | p(2) | 8240.40 | 8202.76 | 8210.53 | 8218.27 | 8219.33 | | p(3) | 8220.75 | 8210.53 | 8217.39 | 8224.26 | 8211.52 | | p(4) | 8223.52 | 8218.26 | 8223.89 | n/a | n/a | +------+----------+----------+----------+----------+----------+ bad values: [(0, 0), (4, 3), (4, 4)] time required 11.767310857772827
b = sorted(list(grid_result.items()), key=lambda k: k[1])
print(b)
[((2, 1), 8202.764414557952), ((1, 2), 8208.130009167802), ((2, 2), 8210.527132456586), ((3, 1), 8210.527162769662), ((3, 4), 8211.523492331788), ((1, 4), 8213.482916654084), ((1, 3), 8213.537119955572), ((3, 2), 8217.394445200434), ((4, 1), 8218.258294896628), ((2, 3), 8218.268074030846), ((2, 4), 8219.32718617729), ((3, 0), 8220.754381057428), ((1, 1), 8222.75177438938), ((4, 0), 8223.519608762306), ((4, 2), 8223.885593759196), ((3, 3), 8224.257522342912), ((2, 0), 8240.398729208286), ((1, 0), 8299.445989273678), ((0, 4), 9580.237456651997), ((0, 3), 9956.617795085402), ((0, 2), 10764.365937165772), ((0, 1), 11908.215104282828)]
Again we will try an ARMA(2,1) model:
model = ARIMA(X2, order=(2,0,1))
r = model.fit()
r.summary()
print('RMSE (in sample):',np.sqrt(np.mean(r.sigma2)))
RMSE (in sample): 1.3711005884788794
train2=train['VIXCM60_Premium'].dropna().tolist()
arima_model2 = ARIMA(train2, order=(2,0,1))
model_fit2 = arima_model2.fit()
We can check to see if there is any information left in the residuals:
run_ljung_box(model_fit2.resid,lags=40)
Ljung-Box Test lag: 7 | p_value: 0.0113 | still has information lag: 8 | p_value: 0.0072 | still has information lag: 9 | p_value: 0.0108 | still has information lag: 10 | p_value: 0.0000 | still has information lag: 11 | p_value: 0.0000 | still has information lag: 12 | p_value: 0.0000 | still has information lag: 13 | p_value: 0.0000 | still has information lag: 14 | p_value: 0.0000 | still has information lag: 15 | p_value: 0.0000 | still has information lag: 16 | p_value: 0.0000 | still has information lag: 17 | p_value: 0.0000 | still has information lag: 18 | p_value: 0.0000 | still has information lag: 19 | p_value: 0.0000 | still has information lag: 20 | p_value: 0.0000 | still has information lag: 21 | p_value: 0.0000 | still has information lag: 22 | p_value: 0.0000 | still has information lag: 23 | p_value: 0.0000 | still has information lag: 24 | p_value: 0.0000 | still has information lag: 25 | p_value: 0.0000 | still has information lag: 26 | p_value: 0.0000 | still has information lag: 27 | p_value: 0.0000 | still has information lag: 28 | p_value: 0.0000 | still has information lag: 29 | p_value: 0.0000 | still has information lag: 30 | p_value: 0.0000 | still has information lag: 31 | p_value: 0.0000 | still has information lag: 32 | p_value: 0.0000 | still has information lag: 33 | p_value: 0.0000 | still has information lag: 34 | p_value: 0.0000 | still has information lag: 35 | p_value: 0.0000 | still has information lag: 36 | p_value: 0.0000 | still has information lag: 37 | p_value: 0.0000 | still has information lag: 38 | p_value: 0.0000 | still has information lag: 39 | p_value: 0.0000 | still has information lag: 40 | p_value: 0.0000 | still has information
Unfortunately, there is still a lot of data left in the residuals. But let's just check to see how the ARMA performs out of sample regardless.
H = train['VIXCM60_Premium'].tolist()
ARIMA_ORDER = (2,0,1)
ARMA_prem2 = []
ARMA_PRED2=[]
VIX = []
ACTUAL_FUTURE2=[]
ACTUAL_prem2 = []
n = 0
for d,row in train.tail(1).iterrows():
VIX +=[row['VIX']]
for d,row in test.iterrows():
arima_model = ARIMA(H, order=ARIMA_ORDER)
arima_fit = arima_model.fit()
y_hat = arima_fit.predict(start=len(H), end=len(H) + 5)[0]
ARMA_prem2 += [y_hat]
ARMA_PRED2+=[y_hat+VIX[-1]]
VIX+=[row['VIX']]
H += [row['VIXCM60_Premium']]
ACTUAL_prem2 += [row['VIXCM60_Premium']]
ACTUAL_FUTURE2+=[row['VIXCM60']]
n += 1
# plotting the first 200 points
plt.plot(ARMA_PRED2[:200], label='predicted')
plt.plot(ACTUAL_FUTURE2[:200], color='b', label='actual')
plt.legend()
<matplotlib.legend.Legend at 0x1c2d5ad5c0>
rolling_ar_errors = measure_error(ACTUAL_FUTURE2, ARMA_PRED2, label='Rolling ARIMA(%d,%d,%d)' % ARIMA_ORDER)
RMSE = 0.6519801144622868 RMSE_pcent = 0.03534142555932097 label = Rolling ARIMA(2,0,1)
ar_cm = plot_confusion_matrix(ACTUAL_FUTURE2, ARMA_PRED2)
Confusion Matrix Raw [[185 116] [184 103]] Normalized [[0.31462585 0.19727891] [0.31292517 0.17517007]]
make_money(ACTUAL_FUTURE2, ARMA_PRED2)
Total profit or loss: -2.8920 Number of correct days: 288 total incorrect days: 300
Now let's try our ARMA (3,4) model to see if we can solve the issue of the information that is remaining in the residuals:
model2 = ARIMA(X, order=(3,0,4))
r2 = model2.fit()
r2.summary()
print('RMSE (in sample):',np.sqrt(np.mean(r2.sigma2)))
RMSE (in sample): 1.1897402525654754
arima_model = ARIMA(train1, order=(3,0,4))
model_fit2 = arima_model.fit()
run_ljung_box(r2.resid,lags=40)
Ljung-Box Test lag: 10 | p_value: 0.0000 | still has information lag: 11 | p_value: 0.0001 | still has information lag: 12 | p_value: 0.0001 | still has information lag: 13 | p_value: 0.0001 | still has information lag: 14 | p_value: 0.0001 | still has information lag: 15 | p_value: 0.0000 | still has information lag: 16 | p_value: 0.0001 | still has information lag: 17 | p_value: 0.0001 | still has information lag: 18 | p_value: 0.0001 | still has information lag: 19 | p_value: 0.0002 | still has information lag: 20 | p_value: 0.0003 | still has information lag: 21 | p_value: 0.0002 | still has information lag: 22 | p_value: 0.0003 | still has information lag: 23 | p_value: 0.0005 | still has information lag: 24 | p_value: 0.0004 | still has information lag: 25 | p_value: 0.0005 | still has information lag: 26 | p_value: 0.0008 | still has information lag: 27 | p_value: 0.0005 | still has information lag: 28 | p_value: 0.0007 | still has information lag: 29 | p_value: 0.0004 | still has information lag: 30 | p_value: 0.0004 | still has information lag: 31 | p_value: 0.0002 | still has information lag: 32 | p_value: 0.0003 | still has information lag: 33 | p_value: 0.0000 | still has information lag: 34 | p_value: 0.0000 | still has information lag: 35 | p_value: 0.0000 | still has information lag: 36 | p_value: 0.0000 | still has information lag: 37 | p_value: 0.0000 | still has information lag: 38 | p_value: 0.0000 | still has information lag: 39 | p_value: 0.0000 | still has information lag: 40 | p_value: 0.0000 | still has information
H = train['VIXCM60_Premium'].tolist()
ARIMA_ORDER = (3,0,4)
ARMA_prem = []
ARMA_PRED=[]
VIX = []
ACTUAL_FUTURE=[]
ACTUAL_prem = []
n = 0
for d,row in train.tail(1).iterrows():
VIX +=[row['VIX']]
for d,row in test.iterrows():
arima_model = ARIMA(H, order=ARIMA_ORDER)
arima_fit = arima_model.fit()
y_hat = arima_fit.predict(start=len(H), end=len(H) + 5)[0]
ARMA_prem += [y_hat]
ARMA_PRED+=[y_hat+VIX[-1]]
VIX+=[row['VIX']]
H += [row['VIXCM60_Premium']]
ACTUAL_prem += [row['VIXCM60_Premium']]
ACTUAL_FUTURE+=[row['VIXCM60']]
n += 1
# plotting the first 200 points
plt.plot(ARMA_PRED[:200], label='predicted')
plt.plot(ACTUAL_FUTURE[:200], color='b', label='actual')
plt.legend()
<matplotlib.legend.Legend at 0x1c3449e668>
rolling_ar_errors2 = measure_error(ACTUAL_FUTURE, ARMA_PRED, label='Rolling ARIMA(%d,%d,%d)' % ARIMA_ORDER)
RMSE = 0.6562020202035773 RMSE_pcent = 0.03557027942183079 label = Rolling ARIMA(3,0,4)
ar_cm2 = plot_confusion_matrix(ACTUAL_FUTURE, ARMA_PRED)
Confusion Matrix Raw [[190 111] [184 103]] Normalized [[0.32312925 0.18877551] [0.31292517 0.17517007]]
make_money(ACTUAL_FUTURE, ARMA_PRED)
Total profit or loss: -6.8196 Number of correct days: 293 total incorrect days: 295
| data | model | First lag in Ljung Box | RMSE_Percent | PnL |
|---|---|---|---|---|
| VIXCM60_Premium | ARMA(2,1) | 7 | 0.03534 | -\$ 2.8920 |
| VIXCM60_Premium | ARMA(3,4) | 10 | 0.03557 | -\$ 6.8196 |
Again we see that when using the ARMA model, we find there is still information left in the residuals. This makes us concerned that our ARMA models are not properly capturing all of the information available, but we feel a bit more confident that our AR models can. Because of this, and the fact that our AR models look to be making us more money, we will choose to use the AR models to develop our strategy, rather than continue running more ARMA models (which is great since we prefer simpler models and ARMA models take much longer to run).
We decided to check how our PnL might change if we allow the sale or purchase of any given future to take over the 5 next days, rather than requiring this action to happen the very next day. To do this, we created a new function, shown below, where the reccommendation (buy +1 or sell -1) follows the same strategy as before, buy if VIXCM30 is predicted to be higher tomorrow, sell if it is predicted to be lower. Then we will multiply the direction by $Future Price_{t*}$ - $Future Price_{t}$. Where $t*$ is the optimal day (max or min) over the next 5 trading days.
For example, if the VIXCM30 was 20 today and the predicted VIXCM30 over the next 5 days were 21,22,24,19,20 respectively, our strategy would tells us to buy (+1) since 20<21 and then we would sell in 3 days because this is the date with the predicted highest VIXCM30 price (24). Then let's day the real price on that 3rd say ends up being 23. Our PnL would be (+1)*(23-20) = +3.
def make_money_week(actual,predicted):
d = {'Predicted': predicted,
'Actual': actual}
PnL_df = pd.DataFrame(d)
Total_PnL =0
direction=0
correct=0
incorrect=0
for i in range(0, (len(PnL_df)-1)):
if PnL_df['Predicted'][i+1][0]>PnL_df['Actual'][i][0]:
direction=1
best_day=max(PnL_df['Predicted'][i+1])
pos=np.unravel_index(np.argmax((PnL_df['Predicted'][i+1]), axis=None), (PnL_df['Predicted'][i+1]).shape)
actual_sell=PnL_df['Actual'][i][pos]
else:
direction=-1
best_day=min(PnL_df['Predicted'][i+1])
pos=np.unravel_index(np.argmin((PnL_df['Predicted'][i+1]), axis=None), (PnL_df['Predicted'][i+1]).shape)
actual_sell=PnL_df['Actual'][i][pos]
daily_PnL= direction*(actual_sell-PnL_df['Actual'][i][0])
Total_PnL+=daily_PnL
if daily_PnL>=0:
correct+=1
else:
incorrect+=1
print('Total profit or loss: %.4f' % (Total_PnL))
print('Number of correct days: %d' % (correct))
print('total incorrect days: %d' % (incorrect))
ACTUAL_LIST_week=[]
for i in range(0,(len(test)-4)):
new=[test.VIXCM30.iloc[i],test.VIXCM30.iloc[i+1],
test.VIXCM30.iloc[i+2],test.VIXCM30.iloc[i+3],test.VIXCM30.iloc[i+4]]
new=np.asarray(new)
ACTUAL_LIST_week+=[new]
H = train['VIXCM30_Premium'].tolist()
AR_ORDER = 49
AR_PRED_week_lists=[]
VIX=[]
for d,row in train.tail(1).iterrows():
VIX+=[row['VIX']]
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 4)
pred_list=[]
pred=y_hat+VIX[-1]
pred_list.append(pred)
AR_PRED_week_lists+=pred_list
VIX+=[row['VIX']]
H += [row['VIXCM30_Premium']]
make_money_week(ACTUAL_LIST_week,AR_PRED_week_lists[:-4])
Total profit or loss: 20.5517 Number of correct days: 336 total incorrect days: 248
ACTUAL_LIST_week60=[]
for i in range(0,(len(test)-4)):
new=[test.VIXCM60.iloc[i],test.VIXCM60.iloc[i+1],
test.VIXCM60.iloc[i+2],test.VIXCM60.iloc[i+3],test.VIXCM60.iloc[i+4]]
new=np.asarray(new)
ACTUAL_LIST_week60+=[new]
H = train['VIXCM60_Premium'].tolist()
AR_ORDER = 35
AR_PRED_week_lists60=[]
VIX=[]
for d,row in train.tail(1).iterrows():
VIX+=[row['VIX']]
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 4)
pred_list=[]
pred=y_hat+VIX[-1]
pred_list.append(pred)
AR_PRED_week_lists60+=pred_list
VIX+=[row['VIX']]
H += [row['VIXCM60_Premium']]
make_money_week(ACTUAL_LIST_week60,AR_PRED_week_lists60[:-4])
Total profit or loss: 8.9814 Number of correct days: 349 total incorrect days: 235
ACTUAL_LIST_week90=[]
for i in range(0,(len(test)-4)):
new=[test.VIXCM90.iloc[i],test.VIXCM90.iloc[i+1],
test.VIXCM90.iloc[i+2],test.VIXCM90.iloc[i+3],test.VIXCM90.iloc[i+4]]
new=np.asarray(new)
ACTUAL_LIST_week90+=[new]
H = train['VIXCM90_Premium'].tolist()
AR_ORDER = 35
AR_PRED_week_lists90=[]
VIX=[]
for d,row in train.tail(1).iterrows():
VIX+=[row['VIX']]
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 4)
pred_list=[]
pred=y_hat+VIX[-1]
pred_list.append(pred)
AR_PRED_week_lists90+=pred_list
VIX+=[row['VIX']]
H += [row['VIXCM90_Premium']]
make_money_week(ACTUAL_LIST_week90,AR_PRED_week_lists90[:-4])
Total profit or loss: 12.4234 Number of correct days: 348 total incorrect days: 236
ACTUAL_LIST_week120=[]
for i in range(0,(len(test)-4)):
new=[test.VIXCM120.iloc[i],test.VIXCM120.iloc[i+1],
test.VIXCM120.iloc[i+2],test.VIXCM120.iloc[i+3],test.VIXCM120.iloc[i+4]]
new=np.asarray(new)
ACTUAL_LIST_week120+=[new]
H = train['VIXCM120_Premium'].tolist()
AR_ORDER = 35
AR_PRED_week_lists120=[]
VIX=[]
for d,row in train.tail(1).iterrows():
VIX+=[row['VIX']]
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 4)
pred_list=[]
pred=y_hat+VIX[-1]
pred_list.append(pred)
AR_PRED_week_lists120+=pred_list
VIX+=[row['VIX']]
H += [row['VIXCM120_Premium']]
make_money_week(ACTUAL_LIST_week120,AR_PRED_week_lists120[:-4])
Total profit or loss: 4.5694 Number of correct days: 351 total incorrect days: 233
ACTUAL_LIST_week150=[]
for i in range(0,(len(test)-4)):
new=[test.VIXCM150.iloc[i],test.VIXCM150.iloc[i+1],
test.VIXCM150.iloc[i+2],test.VIXCM150.iloc[i+3],test.VIXCM150.iloc[i+4]]
new=np.asarray(new)
ACTUAL_LIST_week150+=[new]
H = train['VIXCM150_Premium'].tolist()
AR_ORDER = 49
AR_PRED_week_lists150=[]
VIX=[]
for d,row in train.tail(1).iterrows():
VIX+=[row['VIX']]
for d,row in test.iterrows():
ar_model = AR(H)
ar_fit = ar_model.fit(maxlag=AR_ORDER)
y_hat = ar_fit.predict(start=len(H), end=len(H) + 4)
pred_list=[]
pred=y_hat+VIX[-1]
pred_list.append(pred)
AR_PRED_week_lists150+=pred_list
VIX+=[row['VIX']]
H += [row['VIXCM150_Premium']]
make_money_week(ACTUAL_LIST_week150,AR_PRED_week_lists150[:-4])
Total profit or loss: 10.3753 Number of correct days: 363 total incorrect days: 221
| data | model | PnL (over a week) |
|---|---|---|
| VIXCM30_Premium | AR(49) | +\$ 20.5517 |
| VIXCM60_Premium | AR(35) | +\$ 8.9814 |
| VIXCM90_Premium | AR(35) | +\$ 12.4234 |
| VIXCM120_Premium | AR(35) | +\$ 4.5694 |
| VIXCM150_Premium | AR(49) | +\$ 10.3753 |
Using this new strategy, we end up with higher profits for each model!
After trying ARCH, GARCH, VECM, AR, and ARMA models we came to find that our AR models resulted in the most profitable strategy. It is worth noting that the RSMEs for ARCH and GARCH models were the lowest, so they were the best fit, but these models were predicting the market volitality rather than the price of the futures. In regards to predicting the VIX futures' prices, AR models did the best, but they still didn't result in a strong edge. When revising our strategy to allow the sale or purchase of a future over the 5 next days, rather than requiring this action to happen the very next day, we improved our profits.